Librerias y dependencias¶

In [ ]:
import tensorflow as tf
import os 

import cv2
import imghdr

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import rcParams

# Librerias para implementar el modelo de deep learning
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dense,Flatten, Dropout, BatchNormalization, MaxPool2D, GlobalAveragePooling2D
from tensorflow.keras import regularizers, Model
from tensorflow.keras.optimizers import Adam, Adamax,RMSprop

from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix, roc_auc_score, roc_curve

from keras.utils.vis_utils import plot_model
2023-07-11 22:00:30.547094: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  SSE4.1 SSE4.2
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.

Por si queremos ponerlo para evitar out of memory vamos a limitar el creciente consumo de la memoria del GPU

gpus = tf.config.experimental.list_physical_devices('GPU')

print(gpus)

for gpu in gpus: tf.config.experimental.set_memory_growth(gpu, True)

Carga de datos y comprobación de datos anormales¶

Directorio de donde están nuestros datos

In [ ]:
data_dir = 'chest_xray'
train_dir = os.path.join(data_dir, 'train')
val_dir = os.path.join(data_dir, 'val')
test_dir = os.path.join(data_dir, 'test')

os.listdir, devuelve una lista con todos los elementos dentro de una carpeta

In [ ]:
os.listdir(train_dir)
Out[ ]:
['.DS_Store', 'PNEUMONIA', 'NORMAL']

Comprobamos que podemos cargar todas las imágenes sin problemas y que están en el formato correcto

In [ ]:
listadeforma = []
media_canales = []
for image_class in os.listdir(train_dir):
    if (image_class=="PNEUMONIA" or image_class=="NORMAL"):
        for image in os.listdir(os.path.join(train_dir, image_class)):
            image_path = os.path.join(train_dir,image_class, image)
            try:
                img = cv2.imread(image_path)  
                media_canales.append(cv2.mean(img))
                
                listadeforma.append(img.shape)
            except:
                print("Error" .format(image_path))
Error

Media de la imagen

In [ ]:
cv2.mean(img)
Out[ ]:
(122.88759274473297, 122.88759274473297, 122.88759274473297, 0.0)

La lista de los tamaños de las imagenes

In [ ]:
listadeforma
Out[ ]:
[(736, 1048, 3),
 (672, 984, 3),
 (712, 992, 3),
 (888, 1224, 3),
 (480, 864, 3),
 (584, 944, 3),
 (1110, 1816, 3),
 (1024, 1408, 3),
 (552, 1200, 3),
 (1256, 1336, 3),
 (1040, 1400, 3),
 (608, 1016, 3),
 (936, 1360, 3),
 (1056, 1576, 3),
 (560, 850, 3),
 (866, 1186, 3),
 (600, 976, 3),
 (528, 872, 3),
 (920, 1264, 3),
 (912, 1368, 3),
 (712, 1192, 3),
 (576, 936, 3),
 (656, 1224, 3),
 (728, 1240, 3),
 (560, 976, 3),
 (632, 1064, 3),
 (127, 384, 3),
 (1008, 1136, 3),
 (808, 1184, 3),
 (624, 1096, 3),
 (608, 1008, 3),
 (867, 1302, 3),
 (848, 1206, 3),
 (1288, 1624, 3),
 (1024, 1376, 3),
 (784, 1280, 3),
 (1088, 1384, 3),
 (536, 888, 3),
 (784, 1176, 3),
 (1112, 1576, 3),
 (896, 1400, 3),
 (537, 711, 3),
 (728, 1208, 3),
 (1187, 1690, 3),
 (872, 1112, 3),
 (928, 1368, 3),
 (832, 1296, 3),
 (1056, 1504, 3),
 (584, 1080, 3),
 (1048, 1656, 3),
 (704, 1184, 3),
 (1052, 1526, 3),
 (552, 768, 3),
 (844, 1164, 3),
 (840, 1368, 3),
 (704, 912, 3),
 (784, 1096, 3),
 (808, 1120, 3),
 (712, 1312, 3),
 (888, 1448, 3),
 (960, 1256, 3),
 (800, 952, 3),
 (824, 1024, 3),
 (1112, 1440, 3),
 (653, 1287, 3),
 (880, 1232, 3),
 (960, 1160, 3),
 (720, 976, 3),
 (704, 1112, 3),
 (424, 936, 3),
 (632, 888, 3),
 (744, 1160, 3),
 (1006, 1404, 3),
 (656, 968, 3),
 (475, 856, 3),
 (936, 1328, 3),
 (648, 1072, 3),
 (568, 920, 3),
 (608, 1168, 3),
 (469, 796, 3),
 (808, 1384, 3),
 (1168, 1440, 3),
 (944, 1368, 3),
 (712, 984, 3),
 (1152, 1216, 3),
 (648, 880, 3),
 (632, 976, 3),
 (848, 904, 3),
 (840, 1264, 3),
 (728, 1288, 3),
 (1016, 1168, 3),
 (882, 1226, 3),
 (902, 1294, 3),
 (624, 1056, 3),
 (600, 1104, 3),
 (1056, 1456, 3),
 (544, 856, 3),
 (672, 976, 3),
 (624, 856, 3),
 (824, 1248, 3),
 (1488, 1672, 3),
 (648, 1072, 3),
 (832, 1392, 3),
 (1744, 1904, 3),
 (1424, 2000, 3),
 (992, 1504, 3),
 (608, 1168, 3),
 (618, 759, 3),
 (1128, 1624, 3),
 (1112, 1312, 3),
 (664, 1072, 3),
 (1008, 1216, 3),
 (808, 1056, 3),
 (1137, 1480, 3),
 (621, 1111, 3),
 (784, 1200, 3),
 (936, 1272, 3),
 (658, 1082, 3),
 (704, 1008, 3),
 (720, 936, 3),
 (824, 1312, 3),
 (792, 1256, 3),
 (568, 968, 3),
 (188, 499, 3),
 (992, 1320, 3),
 (856, 1408, 3),
 (574, 863, 3),
 (816, 1000, 3),
 (548, 796, 3),
 (952, 1422, 3),
 (888, 1144, 3),
 (688, 1136, 3),
 (1002, 1510, 3),
 (968, 1240, 3),
 (816, 1152, 3),
 (712, 1040, 3),
 (840, 1200, 3),
 (584, 984, 3),
 (1088, 1384, 3),
 (1024, 1424, 3),
 (1304, 1624, 3),
 (728, 1216, 3),
 (744, 1136, 3),
 (808, 1200, 3),
 (808, 1472, 3),
 (872, 1192, 3),
 (792, 1128, 3),
 (1056, 1512, 3),
 (1040, 1320, 3),
 (334, 649, 3),
 (800, 1312, 3),
 (685, 1010, 3),
 (1152, 1248, 3),
 (640, 848, 3),
 (798, 1122, 3),
 (533, 937, 3),
 (904, 1248, 3),
 (648, 1016, 3),
 (760, 1104, 3),
 (976, 1456, 3),
 (760, 1264, 3),
 (576, 888, 3),
 (672, 1208, 3),
 (1088, 1400, 3),
 (608, 992, 3),
 (736, 1304, 3),
 (513, 907, 3),
 (1032, 1320, 3),
 (1024, 1384, 3),
 (648, 1016, 3),
 (1404, 1582, 3),
 (632, 832, 3),
 (712, 1080, 3),
 (1044, 1827, 3),
 (984, 1600, 3),
 (1128, 1416, 3),
 (952, 1208, 3),
 (600, 1064, 3),
 (392, 928, 3),
 (904, 1296, 3),
 (664, 1048, 3),
 (716, 1048, 3),
 (632, 1160, 3),
 (672, 920, 3),
 (768, 1224, 3),
 (747, 998, 3),
 (872, 1256, 3),
 (840, 1200, 3),
 (752, 1320, 3),
 (588, 932, 3),
 (656, 1008, 3),
 (1712, 1768, 3),
 (1128, 1600, 3),
 (947, 1426, 3),
 (696, 1152, 3),
 (712, 1184, 3),
 (1806, 1911, 3),
 (1056, 1448, 3),
 (824, 1336, 3),
 (744, 1096, 3),
 (968, 1376, 3),
 (520, 880, 3),
 (688, 952, 3),
 (1180, 1504, 3),
 (728, 1176, 3),
 (606, 926, 3),
 (568, 952, 3),
 (476, 781, 3),
 (1176, 1520, 3),
 (232, 445, 3),
 (1138, 1644, 3),
 (1044, 1338, 3),
 (648, 1072, 3),
 (824, 1032, 3),
 (552, 880, 3),
 (808, 1184, 3),
 (740, 998, 3),
 (960, 1304, 3),
 (680, 1016, 3),
 (688, 1152, 3),
 (664, 1048, 3),
 (632, 920, 3),
 (728, 1216, 3),
 (768, 1136, 3),
 (1137, 1478, 3),
 (856, 1208, 3),
 (817, 1352, 3),
 (560, 992, 3),
 (800, 1240, 3),
 (792, 1352, 3),
 (1288, 1704, 3),
 (600, 920, 3),
 (1136, 1736, 3),
 (760, 1176, 3),
 (1008, 1544, 3),
 (840, 1352, 3),
 (640, 1168, 3),
 (1272, 1560, 3),
 (735, 1022, 3),
 (624, 976, 3),
 (1040, 1496, 3),
 (832, 1160, 3),
 (836, 1164, 3),
 (800, 1216, 3),
 (1345, 1654, 3),
 (1192, 1536, 3),
 (952, 1432, 3),
 (329, 647, 3),
 (792, 1248, 3),
 (640, 1008, 3),
 (768, 1336, 3),
 (584, 904, 3),
 (864, 1392, 3),
 (736, 1056, 3),
 (568, 920, 3),
 (672, 888, 3),
 (1088, 1664, 3),
 (1632, 1824, 3),
 (1104, 1400, 3),
 (880, 1216, 3),
 (768, 1056, 3),
 (856, 1448, 3),
 (792, 1376, 3),
 (880, 1216, 3),
 (720, 1152, 3),
 (182, 453, 3),
 (356, 693, 3),
 (1024, 1616, 3),
 (928, 1296, 3),
 (560, 1016, 3),
 (760, 1248, 3),
 (840, 1168, 3),
 (736, 1104, 3),
 (736, 1128, 3),
 (688, 1224, 3),
 (457, 703, 3),
 (688, 968, 3),
 (688, 1088, 3),
 (888, 1336, 3),
 (720, 1040, 3),
 (1424, 1736, 3),
 (968, 1304, 3),
 (792, 1384, 3),
 (552, 1064, 3),
 (888, 1136, 3),
 (1080, 1458, 3),
 (1029, 1310, 3),
 (229, 549, 3),
 (1048, 1472, 3),
 (872, 1168, 3),
 (1288, 1488, 3),
 (672, 1192, 3),
 (1008, 1320, 3),
 (704, 1120, 3),
 (688, 824, 3),
 (656, 896, 3),
 (797, 1310, 3),
 (680, 1136, 3),
 (275, 514, 3),
 (712, 1024, 3),
 (1066, 1532, 3),
 (905, 1280, 3),
 (1000, 1320, 3),
 (720, 1112, 3),
 (782, 1234, 3),
 (856, 1328, 3),
 (656, 1088, 3),
 (1736, 1696, 3),
 (1040, 1360, 3),
 (1336, 1504, 3),
 (808, 1456, 3),
 (632, 1048, 3),
 (880, 1336, 3),
 (1944, 1624, 3),
 (1029, 1530, 3),
 (696, 1176, 3),
 (1080, 1520, 3),
 (768, 1016, 3),
 (219, 527, 3),
 (992, 1720, 3),
 (600, 992, 3),
 (688, 1000, 3),
 (793, 1144, 3),
 (808, 920, 3),
 (872, 1168, 3),
 (960, 1240, 3),
 (1016, 1320, 3),
 (728, 1016, 3),
 (776, 1080, 3),
 (519, 819, 3),
 (766, 1132, 3),
 (504, 888, 3),
 (672, 984, 3),
 (1240, 1720, 3),
 (568, 1088, 3),
 (680, 1040, 3),
 (784, 1328, 3),
 (1384, 1592, 3),
 (1200, 1560, 3),
 (568, 832, 3),
 (624, 976, 3),
 (592, 1064, 3),
 (696, 1256, 3),
 (536, 1088, 3),
 (132, 446, 3),
 (472, 856, 3),
 (936, 1304, 3),
 (720, 976, 3),
 (440, 888, 3),
 (736, 1272, 3),
 (790, 1144, 3),
 (784, 1120, 3),
 (1032, 1160, 3),
 (1472, 1848, 3),
 (670, 1008, 3),
 (1056, 1464, 3),
 (1160, 1640, 3),
 (680, 920, 3),
 (187, 445, 3),
 (808, 1080, 3),
 (1016, 1416, 3),
 (800, 1160, 3),
 (576, 984, 3),
 (944, 1216, 3),
 (528, 990, 3),
 (693, 1086, 3),
 (720, 1128, 3),
 (624, 1016, 3),
 (808, 1232, 3),
 (800, 1072, 3),
 (999, 1376, 3),
 (656, 1088, 3),
 (672, 1192, 3),
 (952, 1328, 3),
 (824, 1544, 3),
 (760, 1168, 3),
 (728, 1112, 3),
 (1350, 1840, 3),
 (800, 1376, 3),
 (1072, 1404, 3),
 (816, 1016, 3),
 (848, 1288, 3),
 (2104, 2056, 3),
 (688, 1184, 3),
 (688, 1088, 3),
 (206, 508, 3),
 (672, 832, 3),
 (1176, 1576, 3),
 (440, 968, 3),
 (712, 1088, 3),
 (375, 765, 3),
 (1133, 1508, 3),
 (608, 984, 3),
 (736, 1048, 3),
 (440, 952, 3),
 (616, 952, 3),
 (488, 1096, 3),
 (816, 1176, 3),
 (1044, 1366, 3),
 (1048, 1488, 3),
 (792, 1328, 3),
 (944, 1472, 3),
 (904, 1472, 3),
 (1280, 1792, 3),
 (984, 1320, 3),
 (672, 968, 3),
 (1208, 1616, 3),
 (1044, 1496, 3),
 (584, 832, 3),
 (736, 1160, 3),
 (488, 760, 3),
 (1456, 1808, 3),
 (744, 976, 3),
 (728, 1128, 3),
 (848, 1240, 3),
 (792, 1216, 3),
 (816, 1432, 3),
 (840, 1288, 3),
 (528, 864, 3),
 (1447, 1841, 3),
 (784, 1112, 3),
 (632, 1112, 3),
 (472, 880, 3),
 (656, 1032, 3),
 (464, 880, 3),
 (592, 832, 3),
 (944, 1384, 3),
 (752, 1040, 3),
 (1000, 1360, 3),
 (792, 1184, 3),
 (990, 1492, 3),
 (760, 1064, 3),
 (1176, 1600, 3),
 (544, 1040, 3),
 (792, 1184, 3),
 (848, 1344, 3),
 (712, 1024, 3),
 (744, 1232, 3),
 (688, 1040, 3),
 (1120, 1208, 3),
 (648, 944, 3),
 (883, 1255, 3),
 (1261, 1360, 3),
 (768, 1256, 3),
 (901, 1392, 3),
 (504, 896, 3),
 (1088, 1512, 3),
 (656, 880, 3),
 (1440, 2072, 3),
 (614, 990, 3),
 (968, 1232, 3),
 (552, 824, 3),
 (896, 1296, 3),
 (794, 1454, 3),
 (1044, 1381, 3),
 (616, 848, 3),
 (824, 1200, 3),
 (880, 1256, 3),
 (912, 1272, 3),
 (552, 1024, 3),
 (920, 1080, 3),
 (609, 941, 3),
 (536, 912, 3),
 (984, 1520, 3),
 (584, 1008, 3),
 (912, 1136, 3),
 (640, 1064, 3),
 (947, 1450, 3),
 (976, 1256, 3),
 (656, 1224, 3),
 (576, 752, 3),
 (856, 1264, 3),
 (1091, 1504, 3),
 (904, 1280, 3),
 (697, 1033, 3),
 (688, 1040, 3),
 (1768, 2072, 3),
 (560, 880, 3),
 (1024, 1336, 3),
 (912, 1088, 3),
 (1048, 1472, 3),
 (1144, 1552, 3),
 (1560, 1768, 3),
 (456, 984, 3),
 (848, 1576, 3),
 (776, 992, 3),
 (768, 1208, 3),
 (736, 1320, 3),
 (592, 936, 3),
 (736, 1104, 3),
 (851, 1288, 3),
 (832, 1360, 3),
 (995, 1348, 3),
 (504, 840, 3),
 (840, 1218, 3),
 (944, 1208, 3),
 (864, 1232, 3),
 (408, 668, 3),
 (1149, 1518, 3),
 (1099, 1662, 3),
 (1392, 1480, 3),
 (864, 1168, 3),
 (544, 904, 3),
 (1000, 1216, 3),
 (600, 1144, 3),
 (520, 984, 3),
 (640, 864, 3),
 (656, 1008, 3),
 (824, 1208, 3),
 (1072, 1400, 3),
 (672, 944, 3),
 (800, 1352, 3),
 (560, 888, 3),
 (1176, 1600, 3),
 (1208, 1368, 3),
 (872, 1304, 3),
 (664, 1176, 3),
 (1044, 1589, 3),
 (519, 790, 3),
 (728, 1104, 3),
 (800, 1312, 3),
 (784, 1080, 3),
 (280, 456, 3),
 (912, 1328, 3),
 (872, 1216, 3),
 (720, 1584, 3),
 (712, 1344, 3),
 (960, 1056, 3),
 (736, 1160, 3),
 (1731, 2080, 3),
 (829, 1094, 3),
 (1368, 1328, 3),
 (627, 1040, 3),
 (736, 1032, 3),
 (784, 1112, 3),
 (776, 936, 3),
 (632, 1080, 3),
 (1123, 1467, 3),
 (640, 848, 3),
 (872, 1216, 3),
 (896, 1336, 3),
 (664, 1008, 3),
 (1032, 1280, 3),
 (968, 1472, 3),
 (888, 1256, 3),
 (616, 888, 3),
 (1296, 1464, 3),
 (1216, 1792, 3),
 (1072, 1384, 3),
 (1365, 1736, 3),
 (624, 896, 3),
 (1080, 1528, 3),
 (1000, 1168, 3),
 (1056, 1424, 3),
 (592, 885, 3),
 (586, 931, 3),
 (528, 1032, 3),
 (624, 944, 3),
 (528, 992, 3),
 (792, 1440, 3),
 (353, 711, 3),
 (752, 1104, 3),
 (688, 1072, 3),
 (840, 1088, 3),
 (1112, 1456, 3),
 (872, 1272, 3),
 (848, 1312, 3),
 (592, 1024, 3),
 (616, 1048, 3),
 (752, 1176, 3),
 (872, 1368, 3),
 (904, 1256, 3),
 (584, 912, 3),
 (728, 1000, 3),
 (936, 1336, 3),
 (512, 800, 3),
 (696, 1056, 3),
 (968, 1296, 3),
 (840, 1552, 3),
 (816, 1016, 3),
 (696, 1048, 3),
 (872, 1512, 3),
 (1200, 1552, 3),
 (704, 1008, 3),
 (872, 1160, 3),
 (680, 1056, 3),
 (279, 550, 3),
 (848, 1336, 3),
 (824, 1336, 3),
 (1024, 1496, 3),
 (1088, 1232, 3),
 (805, 1364, 3),
 (768, 1280, 3),
 (960, 1352, 3),
 (816, 1360, 3),
 (856, 1152, 3),
 (138, 400, 3),
 (1152, 1720, 3),
 (512, 976, 3),
 (1056, 1592, 3),
 (1144, 1568, 3),
 (856, 1144, 3),
 (875, 1326, 3),
 (776, 1240, 3),
 (840, 1168, 3),
 (704, 1128, 3),
 (904, 1264, 3),
 (704, 1016, 3),
 (1672, 1824, 3),
 (920, 1520, 3),
 (784, 1144, 3),
 (784, 1272, 3),
 (1176, 1664, 3),
 (664, 1160, 3),
 (616, 992, 3),
 (824, 1304, 3),
 (712, 824, 3),
 (1328, 1576, 3),
 (656, 1144, 3),
 (440, 832, 3),
 (463, 827, 3),
 (1928, 1912, 3),
 (250, 450, 3),
 (808, 1096, 3),
 (720, 1128, 3),
 (680, 1104, 3),
 (616, 1120, 3),
 (200, 462, 3),
 (642, 1056, 3),
 (984, 1328, 3),
 (994, 1272, 3),
 (1096, 1664, 3),
 (632, 1064, 3),
 (544, 848, 3),
 (576, 904, 3),
 (709, 1236, 3),
 (840, 1336, 3),
 (1048, 1304, 3),
 (898, 1400, 3),
 (632, 1024, 3),
 (952, 1376, 3),
 (760, 1144, 3),
 (560, 1000, 3),
 (848, 1184, 3),
 (664, 1152, 3),
 (888, 1312, 3),
 (928, 1144, 3),
 (832, 968, 3),
 (1056, 1896, 3),
 (1079, 1338, 3),
 (960, 1384, 3),
 (528, 992, 3),
 (848, 1040, 3),
 (640, 824, 3),
 (664, 1040, 3),
 (952, 1064, 3),
 (760, 1168, 3),
 (960, 1320, 3),
 (800, 1416, 3),
 (736, 1120, 3),
 (744, 1104, 3),
 (560, 1016, 3),
 (776, 1160, 3),
 (607, 1015, 3),
 (576, 896, 3),
 (840, 1144, 3),
 (896, 1056, 3),
 (543, 738, 3),
 (480, 1024, 3),
 (520, 1192, 3),
 (353, 713, 3),
 (760, 1176, 3),
 (1224, 1440, 3),
 (800, 1168, 3),
 (688, 1168, 3),
 (1168, 1424, 3),
 (437, 736, 3),
 (563, 869, 3),
 (528, 1040, 3),
 (848, 1280, 3),
 (648, 920, 3),
 (832, 976, 3),
 (1176, 1648, 3),
 (592, 1104, 3),
 (560, 992, 3),
 (800, 1312, 3),
 (512, 832, 3),
 (872, 1312, 3),
 (664, 952, 3),
 (784, 1152, 3),
 (784, 1176, 3),
 (720, 1208, 3),
 (896, 1104, 3),
 (720, 1072, 3),
 (648, 1312, 3),
 (792, 1280, 3),
 (1432, 1760, 3),
 (507, 843, 3),
 (696, 1016, 3),
 (1265, 1736, 3),
 (704, 1400, 3),
 (824, 1296, 3),
 (504, 976, 3),
 (736, 1160, 3),
 (680, 1000, 3),
 (528, 832, 3),
 (664, 928, 3),
 (784, 1176, 3),
 (728, 1104, 3),
 (672, 1216, 3),
 (968, 1408, 3),
 (800, 1320, 3),
 (1152, 1504, 3),
 (896, 1320, 3),
 (680, 896, 3),
 (880, 1448, 3),
 (1208, 1536, 3),
 (752, 928, 3),
 (936, 1640, 3),
 (512, 1064, 3),
 (664, 992, 3),
 (720, 1256, 3),
 (520, 1072, 3),
 (688, 1248, 3),
 (1176, 1432, 3),
 (1240, 1720, 3),
 (921, 1298, 3),
 (577, 856, 3),
 (658, 1244, 3),
 (1576, 1848, 3),
 (1400, 1632, 3),
 (888, 1288, 3),
 (784, 1152, 3),
 (816, 1288, 3),
 (592, 1000, 3),
 (1000, 1296, 3),
 (896, 1224, 3),
 (832, 1664, 3),
 (1072, 1416, 3),
 (1176, 1448, 3),
 (976, 1456, 3),
 (789, 1304, 3),
 (696, 1144, 3),
 (752, 1128, 3),
 (744, 1032, 3),
 (635, 925, 3),
 (688, 984, 3),
 (968, 1432, 3),
 (688, 960, 3),
 (816, 1248, 3),
 (1592, 1880, 3),
 (789, 1218, 3),
 (528, 1008, 3),
 (1544, 1704, 3),
 (664, 1184, 3),
 (879, 1406, 3),
 (784, 1056, 3),
 (528, 896, 3),
 (244, 500, 3),
 (760, 1272, 3),
 (209, 482, 3),
 (880, 1064, 3),
 (560, 848, 3),
 (624, 896, 3),
 (1072, 1256, 3),
 (1056, 1504, 3),
 (1216, 1416, 3),
 (1264, 1872, 3),
 (768, 992, 3),
 (735, 918, 3),
 (1008, 1440, 3),
 (984, 1416, 3),
 (744, 1032, 3),
 (552, 848, 3),
 (512, 904, 3),
 (824, 1376, 3),
 (809, 1290, 3),
 (832, 1296, 3),
 (680, 1480, 3),
 (552, 1056, 3),
 (664, 960, 3),
 (704, 928, 3),
 (736, 920, 3),
 (696, 1040, 3),
 (704, 1088, 3),
 (932, 1240, 3),
 (1176, 1536, 3),
 (584, 984, 3),
 (1144, 1480, 3),
 (1533, 1985, 3),
 (888, 1072, 3),
 (1352, 1808, 3),
 (904, 1408, 3),
 (736, 976, 3),
 (1144, 1576, 3),
 (552, 896, 3),
 (1104, 1456, 3),
 (704, 1208, 3),
 (684, 936, 3),
 (664, 1056, 3),
 (888, 1264, 3),
 (439, 712, 3),
 (520, 952, 3),
 (536, 904, 3),
 (792, 1144, 3),
 (1064, 1536, 3),
 (904, 1336, 3),
 (592, 1120, 3),
 (1033, 1470, 3),
 (968, 1328, 3),
 (608, 1080, 3),
 (1112, 1312, 3),
 (976, 1496, 3),
 (1376, 1528, 3),
 (463, 700, 3),
 (528, 800, 3),
 (880, 1184, 3),
 (864, 1264, 3),
 (624, 1000, 3),
 (1224, 1424, 3),
 (864, 1208, 3),
 (1072, 1416, 3),
 (600, 1008, 3),
 (576, 904, 3),
 (1160, 1560, 3),
 (680, 1072, 3),
 (1059, 1418, 3),
 (704, 1232, 3),
 (674, 1106, 3),
 (600, 968, 3),
 (560, 1040, 3),
 (1104, 1616, 3),
 (852, 1384, 3),
 (855, 1006, 3),
 (728, 1016, 3),
 (571, 942, 3),
 (648, 856, 3),
 (968, 1288, 3),
 (647, 1020, 3),
 (584, 1016, 3),
 (816, 1024, 3),
 (536, 1000, 3),
 (624, 904, 3),
 (2096, 2008, 3),
 (955, 1550, 3),
 (1816, 1940, 3),
 (672, 1152, 3),
 (531, 817, 3),
 (553, 869, 3),
 (880, 1288, 3),
 (1183, 1592, 3),
 (1021, 1446, 3),
 (688, 1232, 3),
 (752, 1152, 3),
 (917, 1276, 3),
 (728, 1000, 3),
 (760, 1176, 3),
 (1160, 1328, 3),
 (910, 1400, 3),
 (712, 912, 3),
 (536, 1032, 3),
 (664, 1152, 3),
 (640, 936, 3),
 (732, 1172, 3),
 (616, 936, 3),
 (1312, 1624, 3),
 (1647, 2006, 3),
 (776, 1224, 3),
 (624, 1000, 3),
 (808, 1192, 3),
 (947, 1276, 3),
 (289, 493, 3),
 (824, 1104, 3),
 (512, 984, 3),
 (1184, 1784, 3),
 (576, 928, 3),
 (674, 1152, 3),
 (552, 976, 3),
 (720, 1080, 3),
 (594, 765, 3),
 (1120, 1368, 3),
 (696, 1072, 3),
 (485, 828, 3),
 (1152, 1432, 3),
 (904, 1368, 3),
 (1210, 1546, 3),
 (736, 1120, 3),
 (576, 816, 3),
 (608, 904, 3),
 (453, 743, 3),
 (520, 1128, 3),
 (984, 1368, 3),
 (504, 928, 3),
 (808, 1192, 3),
 (656, 984, 3),
 (808, 1192, 3),
 (816, 1336, 3),
 (576, 992, 3),
 (576, 904, 3),
 (1008, 1632, 3),
 (824, 1112, 3),
 (752, 1128, 3),
 (1052, 1400, 3),
 (880, 1264, 3),
 (1056, 1320, 3),
 (986, 1202, 3),
 (888, 1328, 3),
 (496, 788, 3),
 (1349, 1928, 3),
 (940, 1314, 3),
 (1064, 1552, 3),
 (832, 1122, 3),
 (600, 976, 3),
 (720, 1256, 3),
 (959, 1574, 3),
 (560, 1000, 3),
 (584, 1008, 3),
 (1600, 1960, 3),
 (768, 1168, 3),
 (640, 1000, 3),
 (1728, 2088, 3),
 (648, 1088, 3),
 (1071, 1574, 3),
 (592, 992, 3),
 (784, 1224, 3),
 (528, 920, 3),
 (1512, 1800, 3),
 (1481, 1592, 3),
 (768, 1160, 3),
 (552, 1064, 3),
 (1432, 1808, 3),
 (639, 1122, 3),
 (1168, 1728, 3),
 (968, 1352, 3),
 (760, 1072, 3),
 (872, 1464, 3),
 (968, 1328, 3),
 (712, 1008, 3),
 (608, 928, 3),
 (864, 1184, 3),
 (728, 1216, 3),
 (720, 920, 3),
 (724, 1110, 3),
 (238, 462, 3),
 (728, 1008, 3),
 (576, 896, 3),
 (768, 1288, 3),
 (856, 1208, 3),
 (928, 1240, 3),
 (496, 936, 3),
 (680, 992, 3),
 (664, 968, 3),
 (512, 824, 3),
 (1282, 1870, 3),
 (712, 1088, 3),
 (416, 904, 3),
 (928, 1478, 3),
 (680, 920, 3),
 (696, 1208, 3),
 (688, 1152, 3),
 (1224, 1328, 3),
 (1112, 1448, 3),
 (1384, 1800, 3),
 (1240, 1320, 3),
 (770, 1072, 3),
 (944, 1216, 3),
 (608, 824, 3),
 (744, 1120, 3),
 (808, 1280, 3),
 (648, 1152, 3),
 (936, 1216, 3),
 (1024, 1616, 3),
 (616, 992, 3),
 (960, 1408, 3),
 (568, 1000, 3),
 (584, 952, 3),
 (1164, 1600, 3),
 (768, 1048, 3),
 (640, 1120, 3),
 (568, 904, 3),
 (1006, 1306, 3),
 (1080, 1400, 3),
 (863, 1244, 3),
 (744, 1216, 3),
 (928, 1200, 3),
 (864, 1136, 3),
 (936, 1504, 3),
 (800, 1288, 3),
 (560, 1160, 3),
 (648, 960, 3),
 (616, 896, 3),
 (752, 1080, 3),
 (760, 1064, 3),
 (496, 856, 3),
 (1040, 1216, 3),
 (808, 1016, 3),
 (800, 1176, 3),
 (712, 1392, 3),
 (856, 1248, 3),
 (1040, 1496, 3),
 ...]

Recorremos la lista anterior para ver conocer las caracteristicas de nuestras imágenes

In [ ]:
suma1 = 0
suma2 = 0
maximoele = 0
minimoele = 9999
maximoele2 = 0
minimoele2 = 9999

for elemento in listadeforma:
    suma1 += elemento[0]
    suma2 += elemento[1]
    if elemento[0]>maximoele:
        maximoele = elemento[0]
    elif elemento[0] < minimoele: 
        minimoele = elemento[0] 
        
    if elemento[1]>maximoele2:
        maximoele2 = elemento[1]
    elif elemento[1] < minimoele2: 
        minimoele2 = elemento[1] 

numero_de_elementos = len(listadeforma)
media1 = suma1 / numero_de_elementos
media2 = suma2 / numero_de_elementos

print(f"Media del primer elemento: {media1}")
print(f"Media del segundo elemento: {media2}")
print(f"Mínimo del primer elemento: {minimoele}")
print(f"Máximo del primer elemento: {maximoele}")
print(f"Mínimo del primer elemento: {minimoele2}")
print(f"Máximo del primer elemento: {maximoele2}")
Media del primer elemento: 968.0747699386503
Media del segundo elemento: 1320.6108128834355
Mínimo del primer elemento: 127
Máximo del primer elemento: 2663
Mínimo del primer elemento: 384
Máximo del primer elemento: 2916
In [ ]:
suma1 = 0
suma2 = 0
suma3 = 0

for elemento in media_canales:
    suma1 += elemento[0]
    suma2 += elemento[1]
    suma3 += elemento[2]

numero_de_elementos = len(media_canales)
media1 = suma1 / numero_de_elementos
media2 = suma2 / numero_de_elementos
media3 = suma3 / numero_de_elementos

print(f"Media del primer elemento: {media1}")
print(f"Media del segundo elemento: {media2}")
print(f"Media del tercer elemento: {media3}")
Media del primer elemento: 122.1830586137792
Media del segundo elemento: 122.1830586137792
Media del tercer elemento: 122.1830586137792

Lectura de imagenes para ver como son las imagenes.

In [ ]:
normal = os.listdir("working/train_dataset/NORMAL")
normal_dir = "working/train_dataset/NORMAL"
plt.figure(figsize=(12, 10))
for i in range(2):
    
    plt.subplot(3, 3, i + 1)
    img = plt.imread(os.path.join(normal_dir, normal[i]))
    print(img.shape)
    #plt.title("Normal")
    plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
    #plt.imshow(img, cmap='gray')
    plt.axis('off')
plt.tight_layout()
(2234, 2359)
(1067, 1422)
In [ ]:
pneumonia = os.listdir("working/train_dataset/PNEUMONIA")
pneumonia_dir = "working/train_dataset/PNEUMONIA"

#plt.figure(figsize=(20, 10))
plt.figure(figsize=(12, 10))
for i in range(2):
    plt.subplot(3, 3, i + 1)
    img = plt.imread(os.path.join(pneumonia_dir, pneumonia[i]))
    #plt.title("Neumonía")
    plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
    #plt.imshow(img, cmap='gray')
    plt.axis('off')
    

Hay muy pocos datos en la particion de validacion(16), vamos a pasarle algunas imagenes de train a val para que podamos trabajar con una particion de val decente

In [ ]:
training_images = tf.io.gfile.glob('chest_xray/train/*/*')
validation_images = tf.io.gfile.glob('chest_xray/val/*/*')

print(f'Before division of 80:20')
print(f'Total number of training images = {len(training_images)}')
print(f'Total number of validation images = {len(validation_images)}\n')


total_files = training_images
total_files.extend(validation_images)
print(f'Total number of images : training_images + validation_images = {len(total_files)}\n')

#lo partimos en 0.2 a val
train_images, val_images = train_test_split(total_files, test_size = 0.2 )
print(f'After division of 80:20')
print(f'Total number of training images = {len(train_images)}')
print(f'Total number of validation images = {len(val_images)}')
Before division of 80:20
Total number of training images = 5217
Total number of validation images = 16

Total number of images : training_images + validation_images = 5233

After division of 80:20
Total number of training images = 4186
Total number of validation images = 1047

Cambiar a python si no se tiene esos directorios para crear nuevos directorios con la particion de datos de train a val

for ele in train_images: parts_of_path = ele.split('/')

if 'PNEUMONIA' == parts_of_path[-2]:
    tf.io.gfile.copy(src = ele, dst = 'working/train_dataset/PNEUMONIA/' +  parts_of_path[-1])
else:
    tf.io.gfile.copy(src = ele, dst = 'working/train_dataset/NORMAL/' +  parts_of_path[-1])

for ele in val_images: parts_of_path = ele.split('/')

if 'PNEUMONIA' == parts_of_path[-2]:
    tf.io.gfile.copy(src = ele, dst = 'working/val_dataset/PNEUMONIA/' +  parts_of_path[-1])
else:
    tf.io.gfile.copy(src = ele, dst = 'working/val_dataset/NORMAL/' +  parts_of_path[-1])
In [ ]:
data_dir_nueva = 'working'
train_dir = os.path.join(data_dir_nueva, 'train_dataset')
val_dir = os.path.join(data_dir_nueva, 'val_dataset')
test_dir = os.path.join(data_dir, 'test')

train_dir_downsampling = os.path.join('downsampling manual', 'train_dataset')
train_dir_oversampling = os.path.join('oversampling manual', 'train_dataset')

Distribución de datos de la variable objetiva¶

In [ ]:
count_normal = len(os.listdir(os.path.join(train_dir, 'NORMAL')))

count_pneumonia = len(os.listdir(os.path.join(train_dir, 'PNEUMONIA')))

datafram_distribucion = [[count_normal, 'normal'], [count_pneumonia, 'pneumonia']]

# Crear el dataframe
df_dist = pd.DataFrame(datafram_distribucion, columns=['valor', 'etiqueta'])
sns.set_style('ticks')
sns.barplot(x='etiqueta', y='valor', data=df_dist)
Out[ ]:
<Axes: xlabel='etiqueta', ylabel='valor'>
In [ ]:
count_normal_val = len(os.listdir(os.path.join(val_dir, 'NORMAL')))

count_pneumonia_val = len(os.listdir(os.path.join(val_dir, 'PNEUMONIA')))
datafram_distribucion_val = [[count_normal_val, 'normal'], [count_pneumonia_val, 'pneumonia']]

df_dist_val = pd.DataFrame(datafram_distribucion_val, columns=['valor', 'etiqueta'])
sns.set_style('ticks')
sns.barplot(x='etiqueta', y='valor', data=df_dist_val)

print(count_normal_val)
print(count_pneumonia_val)
268
780

Preprocesamiento de datos¶

Cargamos los datos usando la función image_dataset_from_directory(ahora usamos image.imagedatagenerator) para además de cargarlos poder hacer ciertos tratamientos sobre ellos.

data = tf.keras.utils.image_dataset_from_directory(train_dir, color_mode="rgb", image_size=(256, 256))

Usamos un generador de imagenes para hacer ciertos procesos de data augmentation y preprocesamiento de nuestras imágenes iniciales.

In [ ]:
##train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale = 1/255, # normalizacion
#                                                    rotation_range = 30, # rotacion de 30
#                                                    zoom_range = 0.2,
#                                                    width_shift_range = 0.1,
#                                                    height_shift_range = 0.1
#)

# Aumento de datos basados del articulo 
train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(
    rescale=1./255,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    zoom_range=0.2
)

train_datagen_sinaumento = tf.keras.preprocessing.image.ImageDataGenerator(
    rescale=1./255)

Convertimos nuestras imágenes a tamaño de píxel 300x300 para agilizar el entrenamiento de los modelos y asignamos un tamaño de batch de 64 y le indicamos que es binaria

In [ ]:
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size = (300,300),
    batch_size = 64 ,
    shuffle= True,
    class_mode = 'binary'
)
Found 4185 images belonging to 2 classes.
In [ ]:
train_generator_downsampling = train_datagen.flow_from_directory(
    train_dir_downsampling,
    target_size = (300,300),
    batch_size = 64 ,
    shuffle= True,
    class_mode = 'binary'
)
Found 2438 images belonging to 2 classes.
In [ ]:
train_generator_oversampling = train_datagen.flow_from_directory(
    train_dir_oversampling,
    target_size = (300,300),
    batch_size = 64 ,
    shuffle= True,
    class_mode = 'binary'
)
Found 5267 images belonging to 2 classes.
In [ ]:
train_generator_oversampling2 = train_datagen.flow_from_directory(
    train_dir_oversampling,
    target_size = (224,224),
    batch_size = 64 ,
    shuffle= True,
    class_mode = 'binary'
)
Found 5267 images belonging to 2 classes.
In [ ]:
train_generator
Out[ ]:
<keras.preprocessing.image.DirectoryIterator at 0x7feae04a87c0>
In [ ]:
print(len(train_generator_downsampling.classes[train_generator_downsampling.classes==0]))
print(len(train_generator_downsampling.classes[train_generator_downsampling.classes==1]))
1082
1356
In [ ]:
print(len(train_generator_oversampling.classes[train_generator_oversampling.classes==0]))
print(len(train_generator_oversampling.classes[train_generator_oversampling.classes==1]))
2164
3103
In [ ]:
print(len(train_generator.classes[train_generator.classes==0]))
print(len(train_generator.classes[train_generator.classes==1]))
1082
3103
In [ ]:
batch_buena= train_generator.next()
In [ ]:
batch_oversampled= train_generator_oversampling.next()
In [ ]:
images_oversampled = batch_oversampled[0]
labels_oversampled = batch_oversampled[1]
In [ ]:
labels_oversampled
Out[ ]:
array([0., 0., 1., 0., 1., 0., 0., 1., 0., 1., 1., 1., 0., 1., 1., 0., 1.,
       0., 0., 1., 0., 0., 1., 1., 1., 0., 0., 0., 1., 1., 1., 1., 1., 1.,
       0., 1., 0., 0., 1., 1., 1., 0., 1., 1., 0., 1., 0., 1., 1., 1., 0.,
       0., 0., 1., 0., 1., 1., 0., 1., 0., 1., 1., 0., 1.], dtype=float32)
In [ ]:
len(labels_oversampled[labels_oversampled==0])
Out[ ]:
28
In [ ]:
#nueva
fig, axs = plt.subplots(2, 2, figsize=(8, 8))
axs = axs.flatten()
j= 9
for i, img in enumerate(images_oversampled[9:13]):
    img= img*255
    axs[i].imshow(img.astype(np.uint8))
    axs[i].set_title('Etiqueta: {}'.format(labels_oversampled[j]))
    axs[i].axis('off')
    j+=1
In [ ]:
images = batch_buena[0]
labels = batch_buena[1]
In [ ]:
labels
Out[ ]:
array([0., 1., 1., 1., 1., 1., 1., 0., 1., 1., 1., 0., 0., 1., 1., 0., 1.,
       1., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 1., 1., 0., 1., 1.,
       1., 0., 1., 1., 1., 0., 1., 1., 0., 1., 1., 1., 0., 1., 1., 1., 1.,
       0., 1., 0., 0., 1., 1., 1., 1., 0., 1., 0., 0., 1.], dtype=float32)
In [ ]:
#nueva
fig, axs = plt.subplots(2, 2, figsize=(8, 8))
axs = axs.flatten()
j= 30
for i, img in enumerate(images[30:34]):
    img= img*255
    axs[i].imshow(img.astype(np.uint8))
    axs[i].set_title('Etiqueta: {}'.format(labels[j]))
    axs[i].axis('off')
    j+=1
In [ ]:
fig, axs = plt.subplots(2, 2, figsize=(8, 8))
axs = axs.flatten()
for i, img in enumerate(images[:4]):
    img= img*255
    axs[i].imshow(img.astype(np.uint8))
    axs[i].set_title('Etiqueta: {}'.format(labels[i]))
    axs[i].axis('off')

0 = NORMAL 1 = PNEUMONIA

Normalización de datos¶

Dividimos el valor de cada pixel en 255 por lo que convertimos el rango de 0-255 a 0-1 para mejorar la eficiencia a la hora de pasarlos a nuestros modelos de machine learning

Confirmamos que hemos hecho bien la normalización del valor de nuestros píxeles.

In [ ]:
print(train_generator.next()[0].min())
train_generator.next()[0].max()
0.0
Out[ ]:
1.0

Partición de datos¶

En nuestro caso ya tenemos los datos particionados en diferentes carpetas por lo que las utilizaremos directamente.

Lo que debemos de tener en cuenta es que no debemos usar el mismo generador de imagenes de train para datos de validacion y test.

La función crea batch de 64 pero en la vida real no procesamos las imágenes en batch en el mismo tiempo, sino que van de una en una, por lo que cambiamos ese parámetro a 1.

El valor por defecto de shuffle es "True", pero para caso de validación y test tampoco necesitamos tener los datos mezclados así que lo cambiamos a false.

In [ ]:
val_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale = 1/255)
In [ ]:
validation_generator = val_datagen.flow_from_directory(
    val_dir,
    target_size = (300,300),
    batch_size = 64 ,
    class_mode = 'binary',
    shuffle= True
)
Found 1047 images belonging to 2 classes.
In [ ]:
print(len(validation_generator.classes[validation_generator.classes==0]))
print(len(validation_generator.classes[validation_generator.classes==1]))
267
780
In [ ]:
test_generator = val_datagen.flow_from_directory(
    test_dir,
    target_size = (300,300),
    batch_size = 1 ,
    class_mode = 'binary',
    shuffle=False
)
Found 624 images belonging to 2 classes.
In [ ]:
test_generator2 = val_datagen.flow_from_directory(
    test_dir,
    target_size = (224,224),
    batch_size = 1 ,
    class_mode = 'binary',
    shuffle=False
)
Found 624 images belonging to 2 classes.

Corregir pesos debidos a datos imbalanceados¶

In [ ]:
initial_bias = np.log([count_pneumonia/count_normal])
initial_bias
Out[ ]:
array([1.05355821])
In [ ]:
weight_for_0 = (1 / count_normal)*(len(train_images))/2.0 
weight_for_1 = (1 / count_pneumonia)*(len(train_images))/2.0

class_weight = {0: weight_for_0, 1: weight_for_1}

print('Peso para la clase 0: {:.2f}'.format(weight_for_0))
print('Peso para la clase 1: {:.2f}'.format(weight_for_1))
Peso para la clase 0: 1.93
Peso para la clase 1: 0.67
In [ ]:
count_normal_oversampled = len(os.listdir(os.path.join(train_dir_oversampling, 'NORMAL')))

count_pneumonia_oversampled = len(os.listdir(os.path.join(train_dir_oversampling, 'PNEUMONIA')))

weight_for_0_oversampled = (1 / count_normal_oversampled)*(5269)/2.0 
weight_for_1_oversampled = (1 / count_pneumonia_oversampled)*(5269)/2.0

class_weight_oversampled = {0: weight_for_0_oversampled, 1: weight_for_0_oversampled}

print('Peso para la clase 0: {:.2f}'.format(weight_for_0_oversampled))
print('Peso para la clase 1: {:.2f}'.format(weight_for_1_oversampled))
Peso para la clase 0: 1.22
Peso para la clase 1: 0.85
In [ ]:
len(train_images)
Out[ ]:
4186

Implementación de modelos¶

Modelos nuevos creados¶

Modelo1

In [ ]:
modelo_propio1= Sequential()
In [ ]:
modelo_propio1.add(Conv2D(filters=16, kernel_size = (3,3), activation=tf.nn.relu , input_shape=(300,300,3) ))
modelo_propio1.add(MaxPooling2D())
modelo_propio1.add(Flatten())
modelo_propio1.add(Dense(1, activation='sigmoid'))

modelo_propio1.add(Conv2D(16,(3,3), 1 ,input_shape=(256,256,1), activation='relu')) modelo_propio1.add(MaxPooling2D())

modelo_propio1.add(Conv2D(32,(3,3), 1 , activation='relu')) modelo_propio1.add(MaxPooling2D())

modelo_propio1.add(Conv2D(16,(3,3), 1 , activation='relu'))

modelo_propio1.add(Flatten())

modelo_propio1.add(Dense(256, activation='relu')) modelo_propio1.add(Dense(1, activation='sigmoid'))

In [ ]:
modelo_propio1.compile(
        tf.keras.optimizers.Adam(learning_rate=0.001),
        loss=tf.losses.BinaryCrossentropy(),
        metrics=['accuracy', tf.keras.metrics.Precision(name='precision'),tf.keras.metrics.Recall(name='recall')])
In [ ]:
modelo_propio1.save('modelos_vista/propio1.h5')
In [ ]:
modelo_propio1.summary()
Model: "sequential_19"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_49 (Conv2D)          (None, 298, 298, 16)      448       
                                                                 
 max_pooling2d_28 (MaxPoolin  (None, 149, 149, 16)     0         
 g2D)                                                            
                                                                 
 flatten_12 (Flatten)        (None, 355216)            0         
                                                                 
 dense_43 (Dense)            (None, 1)                 355217    
                                                                 
=================================================================
Total params: 355,665
Trainable params: 355,665
Non-trainable params: 0
_________________________________________________________________
In [ ]:
checkpoint_propio1 = tf.keras.callbacks.ModelCheckpoint('modelos/model_propio1.h5', save_best_only= True)
early_stopping_propio1 = tf.keras.callbacks.EarlyStopping(monitor ='val_loss', patience=20, mode = 'min',restore_best_weights=True)
tensorboard_callback_propio1 = tf.keras.callbacks.TensorBoard(log_dir='logs/propio1')

Modelo 2

In [ ]:
# Modelo 2

modelo_propio2 = Sequential()

modelo_propio2.add(Conv2D(filters=32, kernel_size=(3, 3), input_shape=(300, 300, 3), activation='relu'))
#modelo_propio2.add(BatchNormalization())
modelo_propio2.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu'))
#modelo_propio2.add(BatchNormalization())
modelo_propio2.add(MaxPooling2D(pool_size=(2, 2)))

modelo_propio2.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
#modelo_propio2.add(BatchNormalization())
modelo_propio2.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
#modelo_propio2.add(BatchNormalization())
modelo_propio2.add(MaxPooling2D(pool_size=(2, 2)))

modelo_propio2.add(Conv2D(filters=128, kernel_size=(3, 3), activation='relu'))
#modelo_propio2.add(BatchNormalization())
modelo_propio2.add(Conv2D(filters=128, kernel_size=(3, 3), activation='relu'))
#modelo_propio2.add(BatchNormalization())
modelo_propio2.add(MaxPooling2D(pool_size=(2, 2)))
modelo_propio2.add(Flatten())
#modelo_propio2.add(GlobalAveragePooling2D())
modelo_propio2.add(Dense(128, activation='relu'))
modelo_propio2.add(Dropout(0.2))

modelo_propio2.add(Dense(1, activation='sigmoid'))

modelo_propio2.compile(loss=tf.losses.BinaryCrossentropy(), 
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001), 
              metrics=['accuracy', tf.keras.metrics.Precision(name='precision'),
        tf.keras.metrics.Recall(name='recall')])
In [ ]:
modelo_propio2.summary()
Model: "sequential_62"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_569 (Conv2D)         (None, 298, 298, 32)      896       
                                                                 
 conv2d_570 (Conv2D)         (None, 296, 296, 32)      9248      
                                                                 
 max_pooling2d_84 (MaxPoolin  (None, 148, 148, 32)     0         
 g2D)                                                            
                                                                 
 conv2d_571 (Conv2D)         (None, 146, 146, 64)      18496     
                                                                 
 conv2d_572 (Conv2D)         (None, 144, 144, 64)      36928     
                                                                 
 max_pooling2d_85 (MaxPoolin  (None, 72, 72, 64)       0         
 g2D)                                                            
                                                                 
 conv2d_573 (Conv2D)         (None, 70, 70, 128)       73856     
                                                                 
 conv2d_574 (Conv2D)         (None, 68, 68, 128)       147584    
                                                                 
 max_pooling2d_86 (MaxPoolin  (None, 34, 34, 128)      0         
 g2D)                                                            
                                                                 
 flatten_31 (Flatten)        (None, 147968)            0         
                                                                 
 dense_118 (Dense)           (None, 128)               18940032  
                                                                 
 dropout_35 (Dropout)        (None, 128)               0         
                                                                 
 dense_119 (Dense)           (None, 1)                 129       
                                                                 
=================================================================
Total params: 19,227,169
Trainable params: 19,227,169
Non-trainable params: 0
_________________________________________________________________
In [ ]:
modelo_propio2.save('modelo_vista/propio3.h5')
In [ ]:
checkpoint_propio2 = tf.keras.callbacks.ModelCheckpoint('modelos/model_propio2.h5', save_best_only= True)
early_stopping_propio2 = tf.keras.callbacks.EarlyStopping(monitor ='val_loss', patience=7, mode = 'min',restore_best_weights=True)
tensorboard_callback_propio2 = tf.keras.callbacks.TensorBoard(log_dir='logs/propio2')
Modelo 3¶
In [ ]:
# Modelo 2

modelo_propio3 = Sequential()

modelo_propio3.add(Conv2D(filters=16, kernel_size=(5, 5), input_shape=(300, 300, 3), activation='relu'))
modelo_propio3.add(MaxPooling2D(pool_size=(3, 3)))

modelo_propio3.add(Conv2D(filters=32, kernel_size=(5, 5), activation='relu'))
#modelo_propio3.add(MaxPooling2D(pool_size=(3, 3)))

modelo_propio3.add(Flatten())
modelo_propio3.add(Dropout(0.2))
modelo_propio3.add(Dense(256, activation='relu'))


modelo_propio3.add(Dense(1, activation='sigmoid'))

modelo_propio3.compile(loss=tf.losses.BinaryCrossentropy(), 
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.001), 
              metrics=['accuracy', tf.keras.metrics.Precision(name='precision'),
        tf.keras.metrics.Recall(name='recall')])
In [ ]:
modelo_propio3.save('modelo_vista/propio2.h5')
In [ ]:
modelo_propio3.summary()
Model: "sequential_37"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_97 (Conv2D)          (None, 296, 296, 16)      1216      
                                                                 
 max_pooling2d_63 (MaxPoolin  (None, 98, 98, 16)       0         
 g2D)                                                            
                                                                 
 conv2d_98 (Conv2D)          (None, 94, 94, 32)        12832     
                                                                 
 flatten_20 (Flatten)        (None, 282752)            0         
                                                                 
 dropout_31 (Dropout)        (None, 282752)            0         
                                                                 
 dense_75 (Dense)            (None, 256)               72384768  
                                                                 
 dense_76 (Dense)            (None, 1)                 257       
                                                                 
=================================================================
Total params: 72,399,073
Trainable params: 72,399,073
Non-trainable params: 0
_________________________________________________________________

Modelos preentrenados¶

InceptionV3¶

Con peso asignado de imagenet

In [ ]:
inceptionv3_modelo_imagenet_base = tf.keras.applications.InceptionV3(weights='imagenet',
                                                       include_top=False,
                                                       input_shape=(300,300,3))

for layers in inceptionv3_modelo_imagenet_base.layers[:200]:
    layers.trainable = False
In [ ]:
inceptionv3_modelo_imagenet = tf.keras.Sequential([
        inceptionv3_modelo_imagenet_base,
        tf.keras.layers.GlobalAveragePooling2D(),
        tf.keras.layers.Dense(1,activation=tf.nn.sigmoid) 
        ])
In [ ]:
inceptionv3_modelo_imagenet.compile(
    loss='binary_crossentropy', 
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001), 
    metrics= ['accuracy', tf.keras.metrics.Precision(name='precision'),
            tf.keras.metrics.Recall(name='recall')])
In [ ]:
inceptionv3_modelo_imagenet.summary()
Model: "sequential_50"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 inception_v3 (Functional)   (None, 8, 8, 2048)        21802784  
                                                                 
 global_average_pooling2d_21  (None, 2048)             0         
  (GlobalAveragePooling2D)                                       
                                                                 
 dense_104 (Dense)           (None, 1)                 2049      
                                                                 
=================================================================
Total params: 21,804,833
Trainable params: 14,806,337
Non-trainable params: 6,998,496
_________________________________________________________________
In [ ]:
checkpoint_inception = tf.keras.callbacks.ModelCheckpoint("modelos/model_inception.h5", save_best_only= True)
In [ ]:
early_stopping_inception = tf.keras.callbacks.EarlyStopping(monitor ='val_loss', patience=20, mode = 'min',restore_best_weights=True)
In [ ]:
tensorboard_callback_inception = tf.keras.callbacks.TensorBoard(log_dir='logs/inception')
ResNet50¶
In [ ]:
resnet50_modelo_base = tf.keras.applications.ResNet50V2(weights='imagenet',
                                                    include_top=False,
                                                    input_shape=(300,300,3))

#for layers in resnet50_modelo_base.layers:
#    layers.trainable = False

resnet50_modelo_base.trainable = False
In [ ]:
resnet50_modelo = tf.keras.Sequential([
        resnet50_modelo_base,
        #tf.keras.layers.GlobalAveragePooling2D(),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(512, activation=tf.nn.relu),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(512, activation=tf.nn.relu),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(1, activation=tf.nn.sigmoid),
        ])
In [ ]:
resnet50_modelo.compile(
    loss='binary_crossentropy',
    optimizer= tf.keras.optimizers.Adam(learning_rate=0.001), 
    metrics= ['accuracy', tf.keras.metrics.Precision(name='precision'),
        tf.keras.metrics.Recall(name='recall')])
In [ ]:
resnet50_modelo.summary()
Model: "sequential_7"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 resnet50v2 (Functional)     (None, 10, 10, 2048)      23564800  
                                                                 
 flatten_7 (Flatten)         (None, 204800)            0         
                                                                 
 dense_8 (Dense)             (None, 512)               104858112 
                                                                 
 dropout_1 (Dropout)         (None, 512)               0         
                                                                 
 dense_9 (Dense)             (None, 512)               262656    
                                                                 
 dropout_2 (Dropout)         (None, 512)               0         
                                                                 
 dense_10 (Dense)            (None, 1)                 513       
                                                                 
=================================================================
Total params: 128,686,081
Trainable params: 105,121,281
Non-trainable params: 23,564,800
_________________________________________________________________
In [ ]:
checkpoint_resnet = tf.keras.callbacks.ModelCheckpoint("modelos/model_resnet.h5", save_best_only= True)
In [ ]:
early_stopping_resnet = tf.keras.callbacks.EarlyStopping(monitor ='val_loss', patience=20, mode = 'min',restore_best_weights=True)
In [ ]:
tensorboard_callback_resnet = tf.keras.callbacks.TensorBoard(log_dir='logs/resnet')
In [ ]:
lr = tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',
                                          patience=8)
VGG16¶

Usamos la función de VGG16 para crear

In [ ]:
vgg16_base = tf.keras.applications.VGG16(weights='imagenet',
                                           include_top=False,
                                           input_shape=(300,300,3))

vgg16_base.trainable = False
In [ ]:
vgg16_modelo = tf.keras.Sequential([
        vgg16_base,
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(512, activation=tf.nn.relu),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(512, activation=tf.nn.relu),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(1,activation=tf.nn.sigmoid)
        ])


vgg16_modelo.compile(
        loss='binary_crossentropy',
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001), 
        metrics= ['accuracy', tf.keras.metrics.Precision(name='precision'),
        tf.keras.metrics.Recall(name='recall')])

vgg16_modelo.summary()
Model: "sequential_9"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 vgg16 (Functional)          (None, 9, 9, 512)         14714688  
                                                                 
 flatten_7 (Flatten)         (None, 41472)             0         
                                                                 
 dense_23 (Dense)            (None, 512)               21234176  
                                                                 
 dropout_10 (Dropout)        (None, 512)               0         
                                                                 
 dense_24 (Dense)            (None, 512)               262656    
                                                                 
 dropout_11 (Dropout)        (None, 512)               0         
                                                                 
 dense_25 (Dense)            (None, 1)                 513       
                                                                 
=================================================================
Total params: 36,212,033
Trainable params: 21,497,345
Non-trainable params: 14,714,688
_________________________________________________________________

En caso de aplicar early stopping:

In [ ]:
checkpoint_vgg = tf.keras.callbacks.ModelCheckpoint("modelos/model_vgg.h5", save_best_only= True)
early_stopping_vgg = tf.keras.callbacks.EarlyStopping(monitor ='val_loss', patience=20, mode = 'min',restore_best_weights=True)
tensorboard_callback_vgg = tf.keras.callbacks.TensorBoard(log_dir='logs/vgg')
Efficientnet¶
In [ ]:
efficientenet_base = tf.keras.applications.EfficientNetB0(
    include_top= False,
    weights= 'imagenet', 
    input_shape= (300,300,3)
)

efficientenet_base.trainable = False
Downloading data from https://storage.googleapis.com/keras-applications/efficientnetb0_notop.h5
16705208/16705208 [==============================] - 1s 0us/step
In [ ]:
#class_count = len(list(train_generator.class_indices.keys())) # to define number of classes in dense layer


efficientenet_model = tf.keras.Sequential([
    efficientenet_base,
    
    tf.keras.layers.GlobalAveragePooling2D(),
    #tf.keras.layers.Flatten(),
    #tf.keras.layers.Dropout(0.2),
    #tf.keras.layers.Dense(64, activation= tf.nn.relu),
    
    #tf.keras.layers.Dense(32, activation = tf.nn.relu),
    #tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(1, activation= tf.nn.sigmoid)
    
])

efficientenet_model.compile(
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.001), 
    loss='binary_crossentropy', 
    metrics= ['accuracy',tf.keras.metrics.Precision(name='precision'),tf.keras.metrics.Recall(name='recall')]
)
### Esto es un metodo de rmsprop literal 
efficientenet_model.summary()
Model: "sequential_60"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 efficientnetb0 (Functional)  (None, 10, 10, 1280)     4049571   
                                                                 
 global_average_pooling2d_31  (None, 1280)             0         
  (GlobalAveragePooling2D)                                       
                                                                 
 dense_114 (Dense)           (None, 1)                 1281      
                                                                 
=================================================================
Total params: 4,050,852
Trainable params: 1,281
Non-trainable params: 4,049,571
_________________________________________________________________
In [ ]:
modelo_eff = tf.keras.applications.EfficientNetB4(weights = 'imagenet', 
                           include_top = False, 
                           input_shape = (224, 224, 3))

## Output layer
x = modelo_eff.output
x = GlobalAveragePooling2D()(x)
x = Dense(64, activation="relu")(x)
x = Dense(32, activation="relu")(x)
predictions = Dense(1, activation="sigmoid")(x)
## Compile and run
modelo_eff = Model(inputs=modelo_eff.input, outputs=predictions)

modelo_eff.compile(optimizer='adam',
              loss='binary_crossentropy', 
              metrics=['accuracy', tf.keras.metrics.Precision(name='precision'),tf.keras.metrics.Recall(name='recall')])
In [ ]:
checkpoint_efficientnet = tf.keras.callbacks.ModelCheckpoint("modelos/model_efficientnet.h5", save_best_only= True)
In [ ]:
early_stopping_efficientnet = tf.keras.callbacks.EarlyStopping(monitor ='val_loss', patience=7, mode = 'min',restore_best_weights=True)
In [ ]:
tensorboard_callback_efficientnet = tf.keras.callbacks.TensorBoard(log_dir='logs/efficientnet')
In [ ]:
rlr = tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', 
                        factor=0.3, 
                        patience=3, 
                        min_lr=0.000001, 
                        verbose=1)
Mobilenet¶
In [ ]:
mobilenet_modelo = tf.keras.Sequential([
        base_model_mobil,
        tf.keras.layers.GlobalAveragePooling2D(),
        tf.keras.layers.Dense(1,activation=tf.nn.sigmoid)
        ])
In [ ]:
mobilenet_modelo.save('modelo_vista/mobilenet_bueno.h5')
WARNING:tensorflow:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model.
WARNING:tensorflow:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model.
In [ ]:
base_model_mobil = tf.keras.applications.MobileNet(weights='imagenet', include_top=False, input_shape=(224, 224, 3)) 

x = base_model_mobil.output

# Add a global spatial average pooling layer
x = GlobalAveragePooling2D()(x)

# Add a logistic layer
predictions = Dense(1, activation="sigmoid")(x)

model_mobil = Model(inputs=base_model_mobil.input, outputs=predictions)
# Compile model
model_mobil.compile(optimizer='adam', loss = 'binary_crossentropy',
                           metrics = ['binary_accuracy', 'mae'])
In [ ]:
print(len(modelo_propio1.layers))
print(len(modelo_propio2.layers))
print(len(modelo_propio3.layers))
print(len(vgg16_modelo.layers))
print(len(resnet50_modelo.layers))
4
13
7
7
7
In [ ]:
len(model_mobil.layers)
Out[ ]:
88
In [ ]:
model_mobil.summary()
Model: "model_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 input_32 (InputLayer)       [(None, 224, 224, 3)]     0         
                                                                 
 conv1 (Conv2D)              (None, 112, 112, 32)      864       
                                                                 
 conv1_bn (BatchNormalizatio  (None, 112, 112, 32)     128       
 n)                                                              
                                                                 
 conv1_relu (ReLU)           (None, 112, 112, 32)      0         
                                                                 
 conv_dw_1 (DepthwiseConv2D)  (None, 112, 112, 32)     288       
                                                                 
 conv_dw_1_bn (BatchNormaliz  (None, 112, 112, 32)     128       
 ation)                                                          
                                                                 
 conv_dw_1_relu (ReLU)       (None, 112, 112, 32)      0         
                                                                 
 conv_pw_1 (Conv2D)          (None, 112, 112, 64)      2048      
                                                                 
 conv_pw_1_bn (BatchNormaliz  (None, 112, 112, 64)     256       
 ation)                                                          
                                                                 
 conv_pw_1_relu (ReLU)       (None, 112, 112, 64)      0         
                                                                 
 conv_pad_2 (ZeroPadding2D)  (None, 113, 113, 64)      0         
                                                                 
 conv_dw_2 (DepthwiseConv2D)  (None, 56, 56, 64)       576       
                                                                 
 conv_dw_2_bn (BatchNormaliz  (None, 56, 56, 64)       256       
 ation)                                                          
                                                                 
 conv_dw_2_relu (ReLU)       (None, 56, 56, 64)        0         
                                                                 
 conv_pw_2 (Conv2D)          (None, 56, 56, 128)       8192      
                                                                 
 conv_pw_2_bn (BatchNormaliz  (None, 56, 56, 128)      512       
 ation)                                                          
                                                                 
 conv_pw_2_relu (ReLU)       (None, 56, 56, 128)       0         
                                                                 
 conv_dw_3 (DepthwiseConv2D)  (None, 56, 56, 128)      1152      
                                                                 
 conv_dw_3_bn (BatchNormaliz  (None, 56, 56, 128)      512       
 ation)                                                          
                                                                 
 conv_dw_3_relu (ReLU)       (None, 56, 56, 128)       0         
                                                                 
 conv_pw_3 (Conv2D)          (None, 56, 56, 128)       16384     
                                                                 
 conv_pw_3_bn (BatchNormaliz  (None, 56, 56, 128)      512       
 ation)                                                          
                                                                 
 conv_pw_3_relu (ReLU)       (None, 56, 56, 128)       0         
                                                                 
 conv_pad_4 (ZeroPadding2D)  (None, 57, 57, 128)       0         
                                                                 
 conv_dw_4 (DepthwiseConv2D)  (None, 28, 28, 128)      1152      
                                                                 
 conv_dw_4_bn (BatchNormaliz  (None, 28, 28, 128)      512       
 ation)                                                          
                                                                 
 conv_dw_4_relu (ReLU)       (None, 28, 28, 128)       0         
                                                                 
 conv_pw_4 (Conv2D)          (None, 28, 28, 256)       32768     
                                                                 
 conv_pw_4_bn (BatchNormaliz  (None, 28, 28, 256)      1024      
 ation)                                                          
                                                                 
 conv_pw_4_relu (ReLU)       (None, 28, 28, 256)       0         
                                                                 
 conv_dw_5 (DepthwiseConv2D)  (None, 28, 28, 256)      2304      
                                                                 
 conv_dw_5_bn (BatchNormaliz  (None, 28, 28, 256)      1024      
 ation)                                                          
                                                                 
 conv_dw_5_relu (ReLU)       (None, 28, 28, 256)       0         
                                                                 
 conv_pw_5 (Conv2D)          (None, 28, 28, 256)       65536     
                                                                 
 conv_pw_5_bn (BatchNormaliz  (None, 28, 28, 256)      1024      
 ation)                                                          
                                                                 
 conv_pw_5_relu (ReLU)       (None, 28, 28, 256)       0         
                                                                 
 conv_pad_6 (ZeroPadding2D)  (None, 29, 29, 256)       0         
                                                                 
 conv_dw_6 (DepthwiseConv2D)  (None, 14, 14, 256)      2304      
                                                                 
 conv_dw_6_bn (BatchNormaliz  (None, 14, 14, 256)      1024      
 ation)                                                          
                                                                 
 conv_dw_6_relu (ReLU)       (None, 14, 14, 256)       0         
                                                                 
 conv_pw_6 (Conv2D)          (None, 14, 14, 512)       131072    
                                                                 
 conv_pw_6_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_pw_6_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_dw_7 (DepthwiseConv2D)  (None, 14, 14, 512)      4608      
                                                                 
 conv_dw_7_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_dw_7_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_pw_7 (Conv2D)          (None, 14, 14, 512)       262144    
                                                                 
 conv_pw_7_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_pw_7_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_dw_8 (DepthwiseConv2D)  (None, 14, 14, 512)      4608      
                                                                 
 conv_dw_8_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_dw_8_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_pw_8 (Conv2D)          (None, 14, 14, 512)       262144    
                                                                 
 conv_pw_8_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_pw_8_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_dw_9 (DepthwiseConv2D)  (None, 14, 14, 512)      4608      
                                                                 
 conv_dw_9_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_dw_9_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_pw_9 (Conv2D)          (None, 14, 14, 512)       262144    
                                                                 
 conv_pw_9_bn (BatchNormaliz  (None, 14, 14, 512)      2048      
 ation)                                                          
                                                                 
 conv_pw_9_relu (ReLU)       (None, 14, 14, 512)       0         
                                                                 
 conv_dw_10 (DepthwiseConv2D  (None, 14, 14, 512)      4608      
 )                                                               
                                                                 
 conv_dw_10_bn (BatchNormali  (None, 14, 14, 512)      2048      
 zation)                                                         
                                                                 
 conv_dw_10_relu (ReLU)      (None, 14, 14, 512)       0         
                                                                 
 conv_pw_10 (Conv2D)         (None, 14, 14, 512)       262144    
                                                                 
 conv_pw_10_bn (BatchNormali  (None, 14, 14, 512)      2048      
 zation)                                                         
                                                                 
 conv_pw_10_relu (ReLU)      (None, 14, 14, 512)       0         
                                                                 
 conv_dw_11 (DepthwiseConv2D  (None, 14, 14, 512)      4608      
 )                                                               
                                                                 
 conv_dw_11_bn (BatchNormali  (None, 14, 14, 512)      2048      
 zation)                                                         
                                                                 
 conv_dw_11_relu (ReLU)      (None, 14, 14, 512)       0         
                                                                 
 conv_pw_11 (Conv2D)         (None, 14, 14, 512)       262144    
                                                                 
 conv_pw_11_bn (BatchNormali  (None, 14, 14, 512)      2048      
 zation)                                                         
                                                                 
 conv_pw_11_relu (ReLU)      (None, 14, 14, 512)       0         
                                                                 
 conv_pad_12 (ZeroPadding2D)  (None, 15, 15, 512)      0         
                                                                 
 conv_dw_12 (DepthwiseConv2D  (None, 7, 7, 512)        4608      
 )                                                               
                                                                 
 conv_dw_12_bn (BatchNormali  (None, 7, 7, 512)        2048      
 zation)                                                         
                                                                 
 conv_dw_12_relu (ReLU)      (None, 7, 7, 512)         0         
                                                                 
 conv_pw_12 (Conv2D)         (None, 7, 7, 1024)        524288    
                                                                 
 conv_pw_12_bn (BatchNormali  (None, 7, 7, 1024)       4096      
 zation)                                                         
                                                                 
 conv_pw_12_relu (ReLU)      (None, 7, 7, 1024)        0         
                                                                 
 conv_dw_13 (DepthwiseConv2D  (None, 7, 7, 1024)       9216      
 )                                                               
                                                                 
 conv_dw_13_bn (BatchNormali  (None, 7, 7, 1024)       4096      
 zation)                                                         
                                                                 
 conv_dw_13_relu (ReLU)      (None, 7, 7, 1024)        0         
                                                                 
 conv_pw_13 (Conv2D)         (None, 7, 7, 1024)        1048576   
                                                                 
 conv_pw_13_bn (BatchNormali  (None, 7, 7, 1024)       4096      
 zation)                                                         
                                                                 
 conv_pw_13_relu (ReLU)      (None, 7, 7, 1024)        0         
                                                                 
 global_average_pooling2d_33  (None, 1024)             0         
  (GlobalAveragePooling2D)                                       
                                                                 
 dense_116 (Dense)           (None, 1)                 1025      
                                                                 
=================================================================
Total params: 3,229,889
Trainable params: 3,208,001
Non-trainable params: 21,888
_________________________________________________________________
In [ ]:
model_mobil.save('modelo_vista/mobilenet.h5')

Entrenamiento¶

En caso de poner alguna semilla para que no varie el modelo

In [ ]:
# Semilla 
tf.random.set_seed(
    12345
)

Modelo Secuencial Propios¶

In [ ]:
model_mobil_trained = model_mobil.fit(
    train_generator_oversampling,
        steps_per_epoch = 10,
        epochs=20, 
        validation_data=validation_generator,
        #callbacks=[early_stopping_propio1],
        class_weight=class_weight_oversampled,
        verbose=1
)
Epoch 1/20
2023-06-22 20:27:40.517220: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - ETA: 0s - loss: 0.4141 - binary_accuracy: 0.8422 - mae: 0.1662
2023-06-22 20:28:48.928243: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - 128s 7s/step - loss: 0.4141 - binary_accuracy: 0.8422 - mae: 0.1662 - val_loss: 0.2652 - val_binary_accuracy: 0.8892 - val_mae: 0.1416
Epoch 2/20
10/10 [==============================] - 40s 4s/step - loss: 0.2053 - binary_accuracy: 0.9453 - mae: 0.0666 - val_loss: 1.2355 - val_binary_accuracy: 0.7240 - val_mae: 0.2817
Epoch 3/20
10/10 [==============================] - 33s 3s/step - loss: 0.2353 - binary_accuracy: 0.9469 - mae: 0.0633 - val_loss: 9.1138 - val_binary_accuracy: 0.2646 - val_mae: 0.7310
Epoch 4/20
10/10 [==============================] - 36s 4s/step - loss: 0.1857 - binary_accuracy: 0.9469 - mae: 0.0644 - val_loss: 0.2868 - val_binary_accuracy: 0.9169 - val_mae: 0.0960
Epoch 5/20
10/10 [==============================] - 31s 3s/step - loss: 0.1423 - binary_accuracy: 0.9531 - mae: 0.0515 - val_loss: 0.2586 - val_binary_accuracy: 0.9245 - val_mae: 0.0838
Epoch 6/20
10/10 [==============================] - 32s 3s/step - loss: 0.1361 - binary_accuracy: 0.9563 - mae: 0.0483 - val_loss: 1.0361 - val_binary_accuracy: 0.8138 - val_mae: 0.1818
Epoch 7/20
10/10 [==============================] - 33s 3s/step - loss: 0.1281 - binary_accuracy: 0.9719 - mae: 0.0387 - val_loss: 0.6102 - val_binary_accuracy: 0.8854 - val_mae: 0.1182
Epoch 8/20
10/10 [==============================] - 33s 3s/step - loss: 0.1085 - binary_accuracy: 0.9609 - mae: 0.0439 - val_loss: 0.5053 - val_binary_accuracy: 0.8988 - val_mae: 0.1035
Epoch 9/20
10/10 [==============================] - 32s 3s/step - loss: 0.0598 - binary_accuracy: 0.9828 - mae: 0.0266 - val_loss: 0.1772 - val_binary_accuracy: 0.9532 - val_mae: 0.0530
Epoch 10/20
10/10 [==============================] - 30s 3s/step - loss: 0.1119 - binary_accuracy: 0.9734 - mae: 0.0357 - val_loss: 0.0628 - val_binary_accuracy: 0.9790 - val_mae: 0.0279
Epoch 11/20
10/10 [==============================] - 30s 3s/step - loss: 0.0724 - binary_accuracy: 0.9797 - mae: 0.0335 - val_loss: 0.1534 - val_binary_accuracy: 0.9580 - val_mae: 0.0479
Epoch 12/20
10/10 [==============================] - 30s 3s/step - loss: 0.0813 - binary_accuracy: 0.9750 - mae: 0.0342 - val_loss: 1.9299 - val_binary_accuracy: 0.6839 - val_mae: 0.3185
Epoch 13/20
10/10 [==============================] - 31s 3s/step - loss: 0.0747 - binary_accuracy: 0.9766 - mae: 0.0322 - val_loss: 0.7236 - val_binary_accuracy: 0.8262 - val_mae: 0.1715
Epoch 14/20
10/10 [==============================] - 31s 3s/step - loss: 0.0569 - binary_accuracy: 0.9844 - mae: 0.0235 - val_loss: 0.5959 - val_binary_accuracy: 0.8749 - val_mae: 0.1292
Epoch 15/20
10/10 [==============================] - 30s 3s/step - loss: 0.0792 - binary_accuracy: 0.9766 - mae: 0.0289 - val_loss: 1.3358 - val_binary_accuracy: 0.7545 - val_mae: 0.2473
Epoch 16/20
10/10 [==============================] - 31s 3s/step - loss: 0.0445 - binary_accuracy: 0.9844 - mae: 0.0234 - val_loss: 1.3263 - val_binary_accuracy: 0.7603 - val_mae: 0.2391
Epoch 17/20
10/10 [==============================] - 31s 3s/step - loss: 0.0563 - binary_accuracy: 0.9828 - mae: 0.0222 - val_loss: 1.5246 - val_binary_accuracy: 0.7536 - val_mae: 0.2467
Epoch 18/20
10/10 [==============================] - 31s 3s/step - loss: 0.0535 - binary_accuracy: 0.9812 - mae: 0.0265 - val_loss: 0.9426 - val_binary_accuracy: 0.8329 - val_mae: 0.1738
Epoch 19/20
10/10 [==============================] - 31s 3s/step - loss: 0.0730 - binary_accuracy: 0.9812 - mae: 0.0256 - val_loss: 0.4565 - val_binary_accuracy: 0.9150 - val_mae: 0.0887
Epoch 20/20
10/10 [==============================] - 32s 3s/step - loss: 0.0634 - binary_accuracy: 0.9828 - mae: 0.0262 - val_loss: 0.1358 - val_binary_accuracy: 0.9656 - val_mae: 0.0383

Modelo Propio 1¶

In [ ]:
modelo_propio1_trained = modelo_propio1.fit(
        train_generator_oversampling,
        steps_per_epoch = 32,
        epochs=20, 
        validation_data=validation_generator,
        callbacks=[early_stopping_propio1],
        class_weight=class_weight_oversampled,
        verbose=1
)
Epoch 1/20
2023-06-21 16:50:15.190488: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
32/32 [==============================] - ETA: 0s - loss: 2.6324 - accuracy: 0.5552 - precision: 0.6061 - recall: 0.6825
2023-06-21 16:51:21.978681: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
32/32 [==============================] - 78s 2s/step - loss: 2.6324 - accuracy: 0.5552 - precision: 0.6061 - recall: 0.6825 - val_loss: 0.5623 - val_accuracy: 0.7908 - val_precision: 0.8000 - val_recall: 0.9590
Epoch 2/20
32/32 [==============================] - 73s 2s/step - loss: 0.6180 - accuracy: 0.7673 - precision: 0.7911 - recall: 0.8129 - val_loss: 0.4236 - val_accuracy: 0.8290 - val_precision: 0.9465 - val_recall: 0.8167
Epoch 3/20
32/32 [==============================] - 74s 2s/step - loss: 0.4837 - accuracy: 0.8589 - precision: 0.9154 - recall: 0.8433 - val_loss: 0.3530 - val_accuracy: 0.8691 - val_precision: 0.9879 - val_recall: 0.8346
Epoch 4/20
32/32 [==============================] - 73s 2s/step - loss: 0.4455 - accuracy: 0.8677 - precision: 0.9082 - recall: 0.8695 - val_loss: 0.2681 - val_accuracy: 0.9045 - val_precision: 0.9816 - val_recall: 0.8885
Epoch 5/20
32/32 [==============================] - 75s 2s/step - loss: 0.4270 - accuracy: 0.8623 - precision: 0.9024 - recall: 0.8589 - val_loss: 0.3176 - val_accuracy: 0.8902 - val_precision: 0.9840 - val_recall: 0.8667
Epoch 6/20
32/32 [==============================] - 74s 2s/step - loss: 0.4460 - accuracy: 0.8604 - precision: 0.8969 - recall: 0.8661 - val_loss: 0.2949 - val_accuracy: 0.8873 - val_precision: 0.9896 - val_recall: 0.8577
Epoch 7/20
32/32 [==============================] - 74s 2s/step - loss: 0.4259 - accuracy: 0.8613 - precision: 0.8965 - recall: 0.8620 - val_loss: 0.4417 - val_accuracy: 0.8099 - val_precision: 0.9949 - val_recall: 0.7487
Epoch 8/20
32/32 [==============================] - 75s 2s/step - loss: 0.4354 - accuracy: 0.8691 - precision: 0.9324 - recall: 0.8402 - val_loss: 0.3107 - val_accuracy: 0.8749 - val_precision: 0.9924 - val_recall: 0.8385
Epoch 9/20
32/32 [==============================] - 73s 2s/step - loss: 0.3942 - accuracy: 0.8842 - precision: 0.9365 - recall: 0.8620 - val_loss: 0.2633 - val_accuracy: 0.9007 - val_precision: 0.9884 - val_recall: 0.8769
Epoch 10/20
32/32 [==============================] - 75s 2s/step - loss: 0.3907 - accuracy: 0.8853 - precision: 0.9220 - recall: 0.8820 - val_loss: 0.4395 - val_accuracy: 0.8357 - val_precision: 0.9967 - val_recall: 0.7821
Epoch 11/20
32/32 [==============================] - 75s 2s/step - loss: 0.3963 - accuracy: 0.8877 - precision: 0.9366 - recall: 0.8684 - val_loss: 0.2411 - val_accuracy: 0.9112 - val_precision: 0.9872 - val_recall: 0.8923
Epoch 12/20
32/32 [==============================] - 73s 2s/step - loss: 0.3957 - accuracy: 0.8794 - precision: 0.9077 - recall: 0.8860 - val_loss: 0.4614 - val_accuracy: 0.8243 - val_precision: 0.9967 - val_recall: 0.7667
Epoch 13/20
32/32 [==============================] - 73s 2s/step - loss: 0.3879 - accuracy: 0.8872 - precision: 0.9277 - recall: 0.8800 - val_loss: 0.4678 - val_accuracy: 0.8357 - val_precision: 0.9951 - val_recall: 0.7833
Epoch 14/20
32/32 [==============================] - 74s 2s/step - loss: 0.3748 - accuracy: 0.8848 - precision: 0.9082 - recall: 0.8863 - val_loss: 0.3684 - val_accuracy: 0.8577 - val_precision: 0.9937 - val_recall: 0.8141
Epoch 15/20
32/32 [==============================] - 73s 2s/step - loss: 0.3691 - accuracy: 0.8911 - precision: 0.9261 - recall: 0.8872 - val_loss: 0.4616 - val_accuracy: 0.8290 - val_precision: 0.9951 - val_recall: 0.7744
Epoch 16/20
32/32 [==============================] - 74s 2s/step - loss: 0.3420 - accuracy: 0.9004 - precision: 0.9382 - recall: 0.8902 - val_loss: 0.6198 - val_accuracy: 0.7536 - val_precision: 0.9981 - val_recall: 0.6705
Epoch 17/20
32/32 [==============================] - 74s 2s/step - loss: 0.3594 - accuracy: 0.8853 - precision: 0.9156 - recall: 0.8858 - val_loss: 0.2518 - val_accuracy: 0.9045 - val_precision: 0.9913 - val_recall: 0.8795
Epoch 18/20
32/32 [==============================] - 73s 2s/step - loss: 0.3228 - accuracy: 0.9096 - precision: 0.9446 - recall: 0.8988 - val_loss: 0.3883 - val_accuracy: 0.8625 - val_precision: 0.9938 - val_recall: 0.8205
Epoch 19/20
32/32 [==============================] - 75s 2s/step - loss: 0.3683 - accuracy: 0.8936 - precision: 0.9324 - recall: 0.8849 - val_loss: 0.2053 - val_accuracy: 0.9265 - val_precision: 0.9769 - val_recall: 0.9231
Epoch 20/20
32/32 [==============================] - 74s 2s/step - loss: 0.3823 - accuracy: 0.8774 - precision: 0.8961 - recall: 0.8983 - val_loss: 0.2874 - val_accuracy: 0.8949 - val_precision: 0.9956 - val_recall: 0.8628

Modelo Propio 2¶

In [ ]:
modelo_propio2_trained = modelo_propio2.fit(
        train_generator_oversampling,
        steps_per_epoch = 32,
        epochs=20, 
        validation_data=validation_generator,
        class_weight=class_weight_oversampled,
        callbacks=[early_stopping_propio2]
)
Epoch 1/20
2023-06-21 18:07:17.868708: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
32/32 [==============================] - ETA: 0s - loss: 0.7782 - accuracy: 0.6694 - precision: 0.6567 - recall: 0.9142
2023-06-21 18:08:28.897123: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
32/32 [==============================] - 84s 2s/step - loss: 0.7782 - accuracy: 0.6694 - precision: 0.6567 - recall: 0.9142 - val_loss: 0.4926 - val_accuracy: 0.7947 - val_precision: 0.8149 - val_recall: 0.9372
Epoch 2/20
32/32 [==============================] - 77s 2s/step - loss: 0.4774 - accuracy: 0.8398 - precision: 0.8795 - recall: 0.8430 - val_loss: 0.4560 - val_accuracy: 0.7870 - val_precision: 0.9912 - val_recall: 0.7205
Epoch 3/20
32/32 [==============================] - 77s 2s/step - loss: 0.4250 - accuracy: 0.8647 - precision: 0.9211 - recall: 0.8425 - val_loss: 0.4271 - val_accuracy: 0.7899 - val_precision: 0.9947 - val_recall: 0.7218
Epoch 4/20
32/32 [==============================] - 77s 2s/step - loss: 0.4098 - accuracy: 0.8706 - precision: 0.9159 - recall: 0.8589 - val_loss: 0.3555 - val_accuracy: 0.8376 - val_precision: 0.9872 - val_recall: 0.7923
Epoch 5/20
32/32 [==============================] - 77s 2s/step - loss: 0.3969 - accuracy: 0.8809 - precision: 0.9255 - recall: 0.8712 - val_loss: 0.3711 - val_accuracy: 0.8243 - val_precision: 0.9885 - val_recall: 0.7731
Epoch 6/20
32/32 [==============================] - 78s 2s/step - loss: 0.3745 - accuracy: 0.8774 - precision: 0.9229 - recall: 0.8661 - val_loss: 0.2528 - val_accuracy: 0.8959 - val_precision: 0.9666 - val_recall: 0.8910
Epoch 7/20
32/32 [==============================] - 78s 2s/step - loss: 0.3689 - accuracy: 0.8887 - precision: 0.9394 - recall: 0.8644 - val_loss: 0.3609 - val_accuracy: 0.8348 - val_precision: 0.9935 - val_recall: 0.7833
Epoch 8/20
32/32 [==============================] - 77s 2s/step - loss: 0.3703 - accuracy: 0.8896 - precision: 0.9352 - recall: 0.8732 - val_loss: 0.4159 - val_accuracy: 0.8080 - val_precision: 0.9932 - val_recall: 0.7474
Epoch 9/20
32/32 [==============================] - 77s 2s/step - loss: 0.3473 - accuracy: 0.8952 - precision: 0.9418 - recall: 0.8789 - val_loss: 0.4113 - val_accuracy: 0.8233 - val_precision: 0.9934 - val_recall: 0.7679
Epoch 10/20
32/32 [==============================] - 78s 2s/step - loss: 0.3409 - accuracy: 0.8970 - precision: 0.9403 - recall: 0.8800 - val_loss: 0.2048 - val_accuracy: 0.9226 - val_precision: 0.9569 - val_recall: 0.9385
Epoch 11/20
32/32 [==============================] - 80s 2s/step - loss: 0.3485 - accuracy: 0.8823 - precision: 0.9150 - recall: 0.8806 - val_loss: 0.2266 - val_accuracy: 0.9284 - val_precision: 0.9596 - val_recall: 0.9436
Epoch 12/20
32/32 [==============================] - 78s 2s/step - loss: 0.3328 - accuracy: 0.8945 - precision: 0.9363 - recall: 0.8803 - val_loss: 0.2263 - val_accuracy: 0.9226 - val_precision: 0.9834 - val_recall: 0.9115
Epoch 13/20
32/32 [==============================] - 77s 2s/step - loss: 0.3586 - accuracy: 0.8842 - precision: 0.9257 - recall: 0.8740 - val_loss: 0.2237 - val_accuracy: 0.9064 - val_precision: 0.9844 - val_recall: 0.8885
Epoch 14/20
32/32 [==============================] - 78s 2s/step - loss: 0.3393 - accuracy: 0.8950 - precision: 0.9309 - recall: 0.8847 - val_loss: 0.5094 - val_accuracy: 0.7202 - val_precision: 1.0000 - val_recall: 0.6244
Epoch 15/20
32/32 [==============================] - 77s 2s/step - loss: 0.3326 - accuracy: 0.8979 - precision: 0.9435 - recall: 0.8781 - val_loss: 0.3634 - val_accuracy: 0.8386 - val_precision: 0.9935 - val_recall: 0.7885
Epoch 16/20
32/32 [==============================] - 78s 2s/step - loss: 0.3584 - accuracy: 0.8777 - precision: 0.9167 - recall: 0.8726 - val_loss: 0.4267 - val_accuracy: 0.7832 - val_precision: 1.0000 - val_recall: 0.7090
Epoch 17/20
32/32 [==============================] - 78s 2s/step - loss: 0.3000 - accuracy: 0.9077 - precision: 0.9414 - recall: 0.9003 - val_loss: 0.3931 - val_accuracy: 0.8290 - val_precision: 0.9951 - val_recall: 0.7744

Modelo Propio 3¶

In [ ]:
modelo_propio3_trained = modelo_propio3.fit(
        train_generator_oversampling,
        steps_per_epoch = 32,
        epochs=20, 
        validation_data=validation_generator,
        class_weight=class_weight_oversampled,
        callbacks=[early_stopping_propio2]
)
Epoch 1/20
2023-06-21 19:28:02.890323: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
32/32 [==============================] - ETA: 0s - loss: 1.8686 - accuracy: 0.6715 - precision: 0.7076 - recall: 0.7309
2023-06-21 19:29:11.155358: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
32/32 [==============================] - 81s 2s/step - loss: 1.8686 - accuracy: 0.6715 - precision: 0.7076 - recall: 0.7309 - val_loss: 0.4544 - val_accuracy: 0.7927 - val_precision: 0.8768 - val_recall: 0.8397
Epoch 2/20
32/32 [==============================] - 76s 2s/step - loss: 0.5611 - accuracy: 0.7798 - precision: 0.8168 - recall: 0.8134 - val_loss: 0.5597 - val_accuracy: 0.6800 - val_precision: 0.9764 - val_recall: 0.5846
Epoch 3/20
32/32 [==============================] - 76s 2s/step - loss: 0.5907 - accuracy: 0.7759 - precision: 0.7928 - recall: 0.8308 - val_loss: 0.5805 - val_accuracy: 0.7698 - val_precision: 0.8468 - val_recall: 0.8436
Epoch 4/20
32/32 [==============================] - 76s 2s/step - loss: 0.5899 - accuracy: 0.8062 - precision: 0.8622 - recall: 0.8070 - val_loss: 0.6018 - val_accuracy: 0.7096 - val_precision: 0.8400 - val_recall: 0.7538
Epoch 5/20
32/32 [==============================] - 76s 2s/step - loss: 0.5110 - accuracy: 0.8320 - precision: 0.8824 - recall: 0.8306 - val_loss: 0.4623 - val_accuracy: 0.7717 - val_precision: 0.9155 - val_recall: 0.7641
Epoch 6/20
32/32 [==============================] - 75s 2s/step - loss: 0.4986 - accuracy: 0.8452 - precision: 0.8991 - recall: 0.8276 - val_loss: 0.4791 - val_accuracy: 0.7650 - val_precision: 0.7980 - val_recall: 0.9167
Epoch 7/20
32/32 [==============================] - 75s 2s/step - loss: 0.5105 - accuracy: 0.8457 - precision: 0.9099 - recall: 0.8150 - val_loss: 0.3394 - val_accuracy: 0.8672 - val_precision: 0.9470 - val_recall: 0.8705
Epoch 8/20
32/32 [==============================] - 77s 2s/step - loss: 0.4456 - accuracy: 0.8525 - precision: 0.9000 - recall: 0.8470 - val_loss: 0.3436 - val_accuracy: 0.8586 - val_precision: 0.9862 - val_recall: 0.8218
Epoch 9/20
32/32 [==============================] - 75s 2s/step - loss: 0.4170 - accuracy: 0.8721 - precision: 0.9129 - recall: 0.8679 - val_loss: 0.4928 - val_accuracy: 0.7985 - val_precision: 0.9982 - val_recall: 0.7308
Epoch 10/20
32/32 [==============================] - 76s 2s/step - loss: 0.4105 - accuracy: 0.8706 - precision: 0.9077 - recall: 0.8621 - val_loss: 0.3690 - val_accuracy: 0.8262 - val_precision: 0.9886 - val_recall: 0.7756
Epoch 11/20
32/32 [==============================] - 76s 2s/step - loss: 0.4255 - accuracy: 0.8677 - precision: 0.9123 - recall: 0.8569 - val_loss: 0.2235 - val_accuracy: 0.9179 - val_precision: 0.9506 - val_recall: 0.9385
Epoch 12/20
32/32 [==============================] - 75s 2s/step - loss: 0.3493 - accuracy: 0.8911 - precision: 0.9185 - recall: 0.8950 - val_loss: 0.9138 - val_accuracy: 0.6390 - val_precision: 1.0000 - val_recall: 0.5154
Epoch 13/20
32/32 [==============================] - 76s 2s/step - loss: 0.4556 - accuracy: 0.8340 - precision: 0.8548 - recall: 0.8647 - val_loss: 0.2967 - val_accuracy: 0.8863 - val_precision: 0.9448 - val_recall: 0.9000
Epoch 14/20
32/32 [==============================] - 76s 2s/step - loss: 0.4102 - accuracy: 0.8726 - precision: 0.9198 - recall: 0.8536 - val_loss: 0.2313 - val_accuracy: 0.9016 - val_precision: 0.9829 - val_recall: 0.8833
Epoch 15/20
32/32 [==============================] - 75s 2s/step - loss: 0.4828 - accuracy: 0.8237 - precision: 0.8278 - recall: 0.8799 - val_loss: 0.3761 - val_accuracy: 0.8166 - val_precision: 0.9682 - val_recall: 0.7795
Epoch 16/20
32/32 [==============================] - 73s 2s/step - loss: 0.4442 - accuracy: 0.8567 - precision: 0.8969 - recall: 0.8603 - val_loss: 0.2874 - val_accuracy: 0.8816 - val_precision: 0.9713 - val_recall: 0.8667
Epoch 17/20
32/32 [==============================] - 75s 2s/step - loss: 0.3791 - accuracy: 0.8877 - precision: 0.9338 - recall: 0.8700 - val_loss: 0.3283 - val_accuracy: 0.8634 - val_precision: 0.9938 - val_recall: 0.8218
Epoch 18/20
32/32 [==============================] - 74s 2s/step - loss: 0.3809 - accuracy: 0.8779 - precision: 0.9088 - recall: 0.8817 - val_loss: 0.2879 - val_accuracy: 0.8777 - val_precision: 0.9851 - val_recall: 0.8487

Modelos prentrenados¶

InceptionV3¶

In [ ]:
inceptionv3_modelo_imagenet_trained2 = inceptionv3_modelo_imagenet.fit(
    train_generator_oversampling,
    epochs=35, 
    steps_per_epoch= 10,
    validation_data=validation_generator, 
    class_weight= class_weight_oversampled,
    callbacks=[checkpoint_inception, early_stopping_inception])
Epoch 1/35
2023-06-22 18:58:01.991616: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - ETA: 0s - loss: 0.4236 - accuracy: 0.8706 - precision: 0.9273 - recall: 0.8524
2023-06-22 18:59:22.056330: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - 141s 12s/step - loss: 0.4236 - accuracy: 0.8706 - precision: 0.9273 - recall: 0.8524 - val_loss: 0.6271 - val_accuracy: 0.6399 - val_precision: 0.9975 - val_recall: 0.5179
Epoch 2/35
10/10 [==============================] - 46s 5s/step - loss: 0.2016 - accuracy: 0.9359 - precision: 0.9528 - recall: 0.9346 - val_loss: 0.6225 - val_accuracy: 0.7106 - val_precision: 1.0000 - val_recall: 0.6115
Epoch 3/35
10/10 [==============================] - 34s 3s/step - loss: 0.1672 - accuracy: 0.9516 - precision: 0.9631 - recall: 0.9555 - val_loss: 0.5162 - val_accuracy: 0.7689 - val_precision: 1.0000 - val_recall: 0.6897
Epoch 4/35
10/10 [==============================] - 32s 3s/step - loss: 0.1550 - accuracy: 0.9531 - precision: 0.9555 - recall: 0.9656 - val_loss: 0.4856 - val_accuracy: 0.8013 - val_precision: 1.0000 - val_recall: 0.7333
Epoch 5/35
10/10 [==============================] - 33s 3s/step - loss: 0.1488 - accuracy: 0.9563 - precision: 0.9734 - recall: 0.9531 - val_loss: 0.5263 - val_accuracy: 0.8052 - val_precision: 1.0000 - val_recall: 0.7385
Epoch 6/35
10/10 [==============================] - 34s 3s/step - loss: 0.1236 - accuracy: 0.9656 - precision: 0.9812 - recall: 0.9605 - val_loss: 0.5725 - val_accuracy: 0.7947 - val_precision: 0.9982 - val_recall: 0.7256
Epoch 7/35
10/10 [==============================] - 33s 3s/step - loss: 0.1215 - accuracy: 0.9609 - precision: 0.9597 - recall: 0.9769 - val_loss: 0.5144 - val_accuracy: 0.8185 - val_precision: 0.9983 - val_recall: 0.7577
Epoch 8/35
10/10 [==============================] - 34s 3s/step - loss: 0.1232 - accuracy: 0.9656 - precision: 0.9786 - recall: 0.9632 - val_loss: 0.6125 - val_accuracy: 0.7908 - val_precision: 0.9982 - val_recall: 0.7205
Epoch 9/35
10/10 [==============================] - 31s 3s/step - loss: 0.0859 - accuracy: 0.9797 - precision: 0.9833 - recall: 0.9806 - val_loss: 0.4431 - val_accuracy: 0.8424 - val_precision: 0.9968 - val_recall: 0.7910
Epoch 10/35
10/10 [==============================] - 34s 3s/step - loss: 0.0900 - accuracy: 0.9781 - precision: 0.9848 - recall: 0.9798 - val_loss: 0.4231 - val_accuracy: 0.8634 - val_precision: 0.9953 - val_recall: 0.8205
Epoch 11/35
10/10 [==============================] - 31s 3s/step - loss: 0.0746 - accuracy: 0.9750 - precision: 0.9752 - recall: 0.9850 - val_loss: 0.4352 - val_accuracy: 0.8634 - val_precision: 0.9969 - val_recall: 0.8192
Epoch 12/35
10/10 [==============================] - 31s 3s/step - loss: 0.0619 - accuracy: 0.9828 - precision: 0.9892 - recall: 0.9813 - val_loss: 0.6377 - val_accuracy: 0.8099 - val_precision: 0.9983 - val_recall: 0.7462
Epoch 13/35
10/10 [==============================] - 31s 3s/step - loss: 0.0712 - accuracy: 0.9797 - precision: 0.9893 - recall: 0.9763 - val_loss: 0.5105 - val_accuracy: 0.8472 - val_precision: 0.9968 - val_recall: 0.7974
Epoch 14/35
10/10 [==============================] - 30s 3s/step - loss: 0.0737 - accuracy: 0.9828 - precision: 0.9865 - recall: 0.9838 - val_loss: 0.6386 - val_accuracy: 0.8157 - val_precision: 0.9983 - val_recall: 0.7538
Epoch 15/35
10/10 [==============================] - 33s 3s/step - loss: 0.0740 - accuracy: 0.9828 - precision: 0.9973 - recall: 0.9741 - val_loss: 0.3738 - val_accuracy: 0.8873 - val_precision: 0.9970 - val_recall: 0.8513
Epoch 16/35
10/10 [==============================] - 31s 3s/step - loss: 0.0398 - accuracy: 0.9953 - precision: 0.9973 - recall: 0.9945 - val_loss: 0.2361 - val_accuracy: 0.9217 - val_precision: 0.9972 - val_recall: 0.8974
Epoch 17/35
10/10 [==============================] - 30s 3s/step - loss: 0.0561 - accuracy: 0.9859 - precision: 0.9926 - recall: 0.9853 - val_loss: 0.3203 - val_accuracy: 0.9064 - val_precision: 0.9985 - val_recall: 0.8756
Epoch 18/35
10/10 [==============================] - 33s 3s/step - loss: 0.0700 - accuracy: 0.9844 - precision: 0.9870 - recall: 0.9870 - val_loss: 0.2707 - val_accuracy: 0.9131 - val_precision: 0.9986 - val_recall: 0.8846
Epoch 19/35
10/10 [==============================] - 31s 3s/step - loss: 0.0414 - accuracy: 0.9906 - precision: 0.9891 - recall: 0.9945 - val_loss: 0.3379 - val_accuracy: 0.8988 - val_precision: 0.9985 - val_recall: 0.8654
Epoch 20/35
10/10 [==============================] - 31s 3s/step - loss: 0.0442 - accuracy: 0.9844 - precision: 0.9918 - recall: 0.9811 - val_loss: 0.3594 - val_accuracy: 0.8949 - val_precision: 0.9970 - val_recall: 0.8615
Epoch 21/35
10/10 [==============================] - 31s 3s/step - loss: 0.0588 - accuracy: 0.9797 - precision: 0.9922 - recall: 0.9744 - val_loss: 0.1873 - val_accuracy: 0.9522 - val_precision: 0.9946 - val_recall: 0.9410
Epoch 22/35
10/10 [==============================] - 29s 3s/step - loss: 0.0501 - accuracy: 0.9849 - precision: 0.9803 - recall: 0.9943 - val_loss: 0.2535 - val_accuracy: 0.9331 - val_precision: 0.9958 - val_recall: 0.9141
Epoch 23/35
10/10 [==============================] - 29s 3s/step - loss: 0.0351 - accuracy: 0.9899 - precision: 0.9946 - recall: 0.9892 - val_loss: 0.2015 - val_accuracy: 0.9436 - val_precision: 0.9959 - val_recall: 0.9282
Epoch 24/35
10/10 [==============================] - 30s 3s/step - loss: 0.0516 - accuracy: 0.9828 - precision: 0.9789 - recall: 0.9920 - val_loss: 0.2080 - val_accuracy: 0.9417 - val_precision: 0.9972 - val_recall: 0.9244
Epoch 25/35
10/10 [==============================] - 31s 3s/step - loss: 0.0511 - accuracy: 0.9859 - precision: 0.9891 - recall: 0.9864 - val_loss: 0.1802 - val_accuracy: 0.9446 - val_precision: 0.9972 - val_recall: 0.9282
Epoch 26/35
10/10 [==============================] - 31s 3s/step - loss: 0.0504 - accuracy: 0.9875 - precision: 0.9887 - recall: 0.9887 - val_loss: 0.1862 - val_accuracy: 0.9417 - val_precision: 0.9986 - val_recall: 0.9231
Epoch 27/35
10/10 [==============================] - 29s 3s/step - loss: 0.0851 - accuracy: 0.9748 - precision: 0.9917 - recall: 0.9676 - val_loss: 0.0752 - val_accuracy: 0.9761 - val_precision: 0.9896 - val_recall: 0.9782
Epoch 28/35
10/10 [==============================] - 30s 3s/step - loss: 0.0840 - accuracy: 0.9672 - precision: 0.9582 - recall: 0.9866 - val_loss: 0.1123 - val_accuracy: 0.9675 - val_precision: 0.9947 - val_recall: 0.9615
Epoch 29/35
10/10 [==============================] - 30s 3s/step - loss: 0.0463 - accuracy: 0.9906 - precision: 0.9918 - recall: 0.9918 - val_loss: 0.2845 - val_accuracy: 0.9265 - val_precision: 0.9986 - val_recall: 0.9026
Epoch 30/35
10/10 [==============================] - 30s 3s/step - loss: 0.0844 - accuracy: 0.9812 - precision: 0.9917 - recall: 0.9755 - val_loss: 0.3585 - val_accuracy: 0.9093 - val_precision: 1.0000 - val_recall: 0.8782
Epoch 31/35
10/10 [==============================] - 29s 3s/step - loss: 0.0345 - accuracy: 0.9916 - precision: 0.9942 - recall: 0.9914 - val_loss: 0.3136 - val_accuracy: 0.9188 - val_precision: 1.0000 - val_recall: 0.8910
Epoch 32/35
10/10 [==============================] - 30s 3s/step - loss: 0.0552 - accuracy: 0.9859 - precision: 0.9974 - recall: 0.9795 - val_loss: 0.2197 - val_accuracy: 0.9446 - val_precision: 1.0000 - val_recall: 0.9256
Epoch 33/35
10/10 [==============================] - 31s 3s/step - loss: 0.0441 - accuracy: 0.9859 - precision: 0.9812 - recall: 0.9946 - val_loss: 0.1687 - val_accuracy: 0.9561 - val_precision: 0.9986 - val_recall: 0.9423
Epoch 34/35
10/10 [==============================] - 31s 3s/step - loss: 0.0450 - accuracy: 0.9891 - precision: 0.9894 - recall: 0.9920 - val_loss: 0.1098 - val_accuracy: 0.9694 - val_precision: 0.9973 - val_recall: 0.9615
Epoch 35/35
10/10 [==============================] - 31s 3s/step - loss: 0.0515 - accuracy: 0.9844 - precision: 0.9896 - recall: 0.9845 - val_loss: 0.1273 - val_accuracy: 0.9666 - val_precision: 0.9960 - val_recall: 0.9590
In [ ]:
inceptionv3_modelo_imagenet_trained = inceptionv3_modelo_imagenet.fit(
    train_generator,
    epochs=35, 
    steps_per_epoch= 10,
    validation_data=validation_generator, 
    class_weight= class_weight,
    callbacks=[checkpoint_inception, early_stopping_inception, tensorboard_callback_inception])
Epoch 1/35
2023-04-24 11:51:19.687365: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - ETA: 0s - loss: 0.4657 - accuracy: 0.7969 - precision: 0.9050 - recall: 0.8197
2023-04-24 11:51:32.577095: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - 86s 8s/step - loss: 0.4657 - accuracy: 0.7969 - precision: 0.9050 - recall: 0.8197 - val_loss: 6.6713 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 2/35
10/10 [==============================] - 75s 8s/step - loss: 0.1808 - accuracy: 0.9062 - precision: 0.9635 - recall: 0.9056 - val_loss: 4.0670 - val_accuracy: 0.7431 - val_precision: 0.9981 - val_recall: 0.6564
Epoch 3/35
10/10 [==============================] - 75s 8s/step - loss: 0.1806 - accuracy: 0.9187 - precision: 0.9676 - recall: 0.9167 - val_loss: 2.0466 - val_accuracy: 0.7717 - val_precision: 0.9963 - val_recall: 0.6962
Epoch 4/35
10/10 [==============================] - 73s 8s/step - loss: 0.1571 - accuracy: 0.9219 - precision: 0.9773 - recall: 0.9149 - val_loss: 6.0504 - val_accuracy: 0.7373 - val_precision: 0.9941 - val_recall: 0.6513
Epoch 5/35
10/10 [==============================] - 75s 8s/step - loss: 0.1216 - accuracy: 0.9312 - precision: 0.9769 - recall: 0.9254 - val_loss: 1.7361 - val_accuracy: 0.9064 - val_precision: 0.9189 - val_recall: 0.9590
Epoch 6/35
10/10 [==============================] - 73s 8s/step - loss: 0.1519 - accuracy: 0.9219 - precision: 0.9648 - recall: 0.9280 - val_loss: 24.3523 - val_accuracy: 0.7708 - val_precision: 0.7647 - val_recall: 1.0000
Epoch 7/35
10/10 [==============================] - 73s 8s/step - loss: 0.1176 - accuracy: 0.9594 - precision: 0.9912 - recall: 0.9536 - val_loss: 8.6743 - val_accuracy: 0.7536 - val_precision: 0.7514 - val_recall: 1.0000
Epoch 8/35
10/10 [==============================] - 75s 8s/step - loss: 0.0940 - accuracy: 0.9531 - precision: 0.9876 - recall: 0.9520 - val_loss: 0.2380 - val_accuracy: 0.9475 - val_precision: 0.9840 - val_recall: 0.9449
Epoch 9/35
10/10 [==============================] - 74s 8s/step - loss: 0.1004 - accuracy: 0.9500 - precision: 0.9835 - recall: 0.9522 - val_loss: 2.0456 - val_accuracy: 0.8529 - val_precision: 0.9728 - val_recall: 0.8256
Epoch 10/35
10/10 [==============================] - 74s 8s/step - loss: 0.0420 - accuracy: 0.9781 - precision: 0.9956 - recall: 0.9742 - val_loss: 0.1899 - val_accuracy: 0.9599 - val_precision: 0.9792 - val_recall: 0.9667
Epoch 11/35
10/10 [==============================] - 74s 8s/step - loss: 0.1434 - accuracy: 0.9187 - precision: 0.9809 - recall: 0.9031 - val_loss: 98.8263 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 12/35
10/10 [==============================] - 74s 8s/step - loss: 0.0923 - accuracy: 0.9594 - precision: 0.9913 - recall: 0.9542 - val_loss: 1.1993 - val_accuracy: 0.9417 - val_precision: 0.9347 - val_recall: 0.9910
Epoch 13/35
10/10 [==============================] - 75s 8s/step - loss: 0.0706 - accuracy: 0.9585 - precision: 0.9825 - recall: 0.9614 - val_loss: 0.6559 - val_accuracy: 0.9054 - val_precision: 0.9985 - val_recall: 0.8744
Epoch 14/35
10/10 [==============================] - 75s 8s/step - loss: 0.0679 - accuracy: 0.9563 - precision: 0.9917 - recall: 0.9522 - val_loss: 18.7436 - val_accuracy: 0.2684 - val_precision: 1.0000 - val_recall: 0.0179
Epoch 15/35
10/10 [==============================] - 73s 8s/step - loss: 0.0798 - accuracy: 0.9563 - precision: 0.9823 - recall: 0.9569 - val_loss: 0.5126 - val_accuracy: 0.9628 - val_precision: 0.9625 - val_recall: 0.9885
Epoch 16/35
10/10 [==============================] - 73s 8s/step - loss: 0.0441 - accuracy: 0.9844 - precision: 0.9913 - recall: 0.9870 - val_loss: 0.4382 - val_accuracy: 0.9408 - val_precision: 0.9294 - val_recall: 0.9962
Epoch 17/35
10/10 [==============================] - 73s 8s/step - loss: 0.0977 - accuracy: 0.9563 - precision: 0.9867 - recall: 0.9528 - val_loss: 0.3129 - val_accuracy: 0.9389 - val_precision: 0.9262 - val_recall: 0.9974
Epoch 18/35
10/10 [==============================] - 75s 8s/step - loss: 0.0962 - accuracy: 0.9594 - precision: 0.9918 - recall: 0.9563 - val_loss: 0.5867 - val_accuracy: 0.9179 - val_precision: 0.9016 - val_recall: 0.9987
Epoch 19/35
10/10 [==============================] - 73s 8s/step - loss: 0.0550 - accuracy: 0.9750 - precision: 0.9917 - recall: 0.9754 - val_loss: 2.1447 - val_accuracy: 0.8185 - val_precision: 0.8041 - val_recall: 1.0000
Epoch 20/35
10/10 [==============================] - 73s 8s/step - loss: 0.0658 - accuracy: 0.9625 - precision: 0.9916 - recall: 0.9592 - val_loss: 3.5959 - val_accuracy: 0.7641 - val_precision: 0.7595 - val_recall: 1.0000
Epoch 21/35
10/10 [==============================] - 73s 8s/step - loss: 0.0509 - accuracy: 0.9750 - precision: 0.9959 - recall: 0.9720 - val_loss: 0.9771 - val_accuracy: 0.9102 - val_precision: 0.8933 - val_recall: 0.9987
Epoch 22/35
10/10 [==============================] - 74s 8s/step - loss: 0.0675 - accuracy: 0.9688 - precision: 0.9871 - recall: 0.9705 - val_loss: 5.1215 - val_accuracy: 0.7708 - val_precision: 0.7647 - val_recall: 1.0000
Epoch 23/35
10/10 [==============================] - 74s 8s/step - loss: 0.0547 - accuracy: 0.9781 - precision: 0.9957 - recall: 0.9748 - val_loss: 3.1164 - val_accuracy: 0.7880 - val_precision: 0.7812 - val_recall: 0.9936
Epoch 24/35
10/10 [==============================] - 75s 8s/step - loss: 0.0866 - accuracy: 0.9625 - precision: 0.9908 - recall: 0.9556 - val_loss: 4.3666 - val_accuracy: 0.7755 - val_precision: 0.7690 - val_recall: 0.9987
Epoch 25/35
10/10 [==============================] - 73s 8s/step - loss: 0.0711 - accuracy: 0.9712 - precision: 0.9956 - recall: 0.9660 - val_loss: 0.1946 - val_accuracy: 0.9408 - val_precision: 0.9591 - val_recall: 0.9615
Epoch 26/35
10/10 [==============================] - 75s 8s/step - loss: 0.0887 - accuracy: 0.9625 - precision: 0.9874 - recall: 0.9631 - val_loss: 0.0738 - val_accuracy: 0.9742 - val_precision: 0.9896 - val_recall: 0.9756
Epoch 27/35
10/10 [==============================] - 73s 8s/step - loss: 0.0385 - accuracy: 0.9719 - precision: 0.9914 - recall: 0.9705 - val_loss: 0.0963 - val_accuracy: 0.9723 - val_precision: 0.9711 - val_recall: 0.9923
Epoch 28/35
10/10 [==============================] - 74s 8s/step - loss: 0.0558 - accuracy: 0.9688 - precision: 0.9957 - recall: 0.9623 - val_loss: 0.1036 - val_accuracy: 0.9713 - val_precision: 0.9960 - val_recall: 0.9654
Epoch 29/35
10/10 [==============================] - 74s 8s/step - loss: 0.0389 - accuracy: 0.9844 - precision: 0.9880 - recall: 0.9920 - val_loss: 0.6056 - val_accuracy: 0.9150 - val_precision: 1.0000 - val_recall: 0.8859
Epoch 30/35
10/10 [==============================] - 73s 8s/step - loss: 0.0430 - accuracy: 0.9750 - precision: 0.9916 - recall: 0.9751 - val_loss: 0.5005 - val_accuracy: 0.9102 - val_precision: 0.9971 - val_recall: 0.8821
Epoch 31/35
10/10 [==============================] - 74s 8s/step - loss: 0.1235 - accuracy: 0.9625 - precision: 0.9816 - recall: 0.9638 - val_loss: 0.3625 - val_accuracy: 0.9236 - val_precision: 0.9930 - val_recall: 0.9038
Epoch 32/35
10/10 [==============================] - 74s 8s/step - loss: 0.0529 - accuracy: 0.9688 - precision: 0.9956 - recall: 0.9619 - val_loss: 0.2213 - val_accuracy: 0.9542 - val_precision: 0.9919 - val_recall: 0.9462
Epoch 33/35
10/10 [==============================] - 73s 8s/step - loss: 0.0365 - accuracy: 0.9844 - precision: 0.9911 - recall: 0.9867 - val_loss: 0.3843 - val_accuracy: 0.9188 - val_precision: 0.9929 - val_recall: 0.8974
Epoch 34/35
10/10 [==============================] - 73s 8s/step - loss: 0.0410 - accuracy: 0.9688 - precision: 0.9957 - recall: 0.9628 - val_loss: 0.1863 - val_accuracy: 0.9599 - val_precision: 0.9973 - val_recall: 0.9487
Epoch 35/35
10/10 [==============================] - 73s 8s/step - loss: 0.0294 - accuracy: 0.9872 - precision: 0.9906 - recall: 0.9906 - val_loss: 0.3772 - val_accuracy: 0.9245 - val_precision: 1.0000 - val_recall: 0.8987

Resnet50¶

In [ ]:
resnet50_modelo_trained  = resnet50_modelo.fit(
    train_generator,
    steps_per_epoch=100,
    epochs=30,
    validation_data=validation_generator, 
    class_weight= class_weight,
    callbacks=[checkpoint_resnet, early_stopping_resnet, tensorboard_callback_resnet, lr])
In [ ]:
resnet50_modelo_trained2  = resnet50_modelo.fit(
    train_generator_oversampling,
    steps_per_epoch=10,
    epochs=20,
    validation_data=validation_generator, 
    class_weight= class_weight_oversampled,
    callbacks=[early_stopping_resnet])
Epoch 1/20
2023-06-20 17:29:17.879006: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - ETA: 0s - loss: 19.0319 - accuracy: 0.6422 - precision: 0.6958 - recall: 0.6976
2023-06-20 17:29:43.923787: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - 54s 5s/step - loss: 19.0319 - accuracy: 0.6422 - precision: 0.6958 - recall: 0.6976 - val_loss: 0.8399 - val_accuracy: 0.9207 - val_precision: 0.9235 - val_recall: 0.9744
Epoch 2/20
10/10 [==============================] - 35s 3s/step - loss: 3.4050 - accuracy: 0.8813 - precision: 0.9077 - recall: 0.8985 - val_loss: 1.1861 - val_accuracy: 0.9035 - val_precision: 0.9942 - val_recall: 0.8756
Epoch 3/20
10/10 [==============================] - 34s 3s/step - loss: 1.6497 - accuracy: 0.8703 - precision: 0.8955 - recall: 0.8978 - val_loss: 0.4148 - val_accuracy: 0.9179 - val_precision: 0.9860 - val_recall: 0.9026
Epoch 4/20
10/10 [==============================] - 32s 3s/step - loss: 1.3206 - accuracy: 0.8766 - precision: 0.8982 - recall: 0.8958 - val_loss: 0.6489 - val_accuracy: 0.9064 - val_precision: 0.9942 - val_recall: 0.8795
Epoch 5/20
10/10 [==============================] - 32s 3s/step - loss: 1.1044 - accuracy: 0.9047 - precision: 0.9119 - recall: 0.9288 - val_loss: 1.4503 - val_accuracy: 0.8491 - val_precision: 1.0000 - val_recall: 0.7974
Epoch 6/20
10/10 [==============================] - 32s 3s/step - loss: 1.1335 - accuracy: 0.8969 - precision: 0.9148 - recall: 0.9049 - val_loss: 1.8144 - val_accuracy: 0.8386 - val_precision: 1.0000 - val_recall: 0.7833
Epoch 7/20
10/10 [==============================] - 32s 3s/step - loss: 1.4104 - accuracy: 0.8906 - precision: 0.9147 - recall: 0.8835 - val_loss: 1.1662 - val_accuracy: 0.8424 - val_precision: 1.0000 - val_recall: 0.7885
Epoch 8/20
10/10 [==============================] - 33s 3s/step - loss: 0.4107 - accuracy: 0.9203 - precision: 0.9304 - recall: 0.9377 - val_loss: 0.8663 - val_accuracy: 0.8634 - val_precision: 0.9984 - val_recall: 0.8179
Epoch 9/20
10/10 [==============================] - 33s 3s/step - loss: 0.4108 - accuracy: 0.9297 - precision: 0.9435 - recall: 0.9360 - val_loss: 0.3385 - val_accuracy: 0.9265 - val_precision: 0.9916 - val_recall: 0.9090
Epoch 10/20
10/10 [==============================] - 34s 3s/step - loss: 0.7506 - accuracy: 0.9187 - precision: 0.9270 - recall: 0.9321 - val_loss: 0.5837 - val_accuracy: 0.9007 - val_precision: 0.9971 - val_recall: 0.8692
Epoch 11/20
10/10 [==============================] - 32s 3s/step - loss: 0.5139 - accuracy: 0.9250 - precision: 0.9404 - recall: 0.9303 - val_loss: 0.8275 - val_accuracy: 0.8606 - val_precision: 0.9984 - val_recall: 0.8141
Epoch 12/20
10/10 [==============================] - 32s 3s/step - loss: 0.3954 - accuracy: 0.9344 - precision: 0.9348 - recall: 0.9503 - val_loss: 1.1394 - val_accuracy: 0.8118 - val_precision: 1.0000 - val_recall: 0.7474
Epoch 13/20
10/10 [==============================] - 33s 3s/step - loss: 0.4437 - accuracy: 0.9250 - precision: 0.9405 - recall: 0.9305 - val_loss: 0.9398 - val_accuracy: 0.8491 - val_precision: 1.0000 - val_recall: 0.7974
Epoch 14/20
10/10 [==============================] - 33s 3s/step - loss: 0.4391 - accuracy: 0.9187 - precision: 0.9392 - recall: 0.9189 - val_loss: 0.2423 - val_accuracy: 0.9341 - val_precision: 0.9917 - val_recall: 0.9192
Epoch 15/20
10/10 [==============================] - 32s 3s/step - loss: 0.3186 - accuracy: 0.9391 - precision: 0.9370 - recall: 0.9597 - val_loss: 0.4062 - val_accuracy: 0.9054 - val_precision: 0.9985 - val_recall: 0.8744
Epoch 16/20
10/10 [==============================] - 31s 3s/step - loss: 0.3729 - accuracy: 0.9391 - precision: 0.9628 - recall: 0.9354 - val_loss: 0.5417 - val_accuracy: 0.8720 - val_precision: 1.0000 - val_recall: 0.8282
Epoch 17/20
10/10 [==============================] - 30s 3s/step - loss: 0.3353 - accuracy: 0.9378 - precision: 0.9354 - recall: 0.9597 - val_loss: 0.5460 - val_accuracy: 0.8491 - val_precision: 1.0000 - val_recall: 0.7974
Epoch 18/20
10/10 [==============================] - 31s 3s/step - loss: 0.2366 - accuracy: 0.9438 - precision: 0.9515 - recall: 0.9515 - val_loss: 0.5466 - val_accuracy: 0.8529 - val_precision: 1.0000 - val_recall: 0.8026
Epoch 19/20
10/10 [==============================] - 31s 3s/step - loss: 0.3035 - accuracy: 0.9244 - precision: 0.9446 - recall: 0.9192 - val_loss: 0.2111 - val_accuracy: 0.9274 - val_precision: 0.9848 - val_recall: 0.9167
Epoch 20/20
10/10 [==============================] - 31s 3s/step - loss: 0.2974 - accuracy: 0.9277 - precision: 0.9288 - recall: 0.9428 - val_loss: 0.3221 - val_accuracy: 0.8835 - val_precision: 0.9955 - val_recall: 0.8474

VGG16¶

In [ ]:
vgg16_modelo_trained = vgg16_modelo.fit(
    train_generator,
    steps_per_epoch=10,
    epochs=20,
    validation_data=validation_generator,
    class_weight= class_weight,
    callbacks=[checkpoint_vgg, early_stopping_vgg, tensorboard_callback_vgg]
    )
    
In [ ]:
vgg16_modelo_trained2 = vgg16_modelo.fit(
    train_generator_oversampling,
    steps_per_epoch=10,
    epochs=20,
    validation_data=validation_generator,
    class_weight=class_weight_oversampled,
    callbacks=[early_stopping_vgg])
Epoch 1/20
2023-06-20 18:09:59.841463: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - ETA: 0s - loss: 3.6669 - accuracy: 0.6047 - precision: 0.6787 - recall: 0.7007
2023-06-20 18:10:24.877864: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - 50s 5s/step - loss: 3.6669 - accuracy: 0.6047 - precision: 0.6787 - recall: 0.7007 - val_loss: 0.6390 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 2/20
10/10 [==============================] - 41s 4s/step - loss: 1.0083 - accuracy: 0.7078 - precision: 0.7429 - recall: 0.7980 - val_loss: 0.4035 - val_accuracy: 0.8386 - val_precision: 0.9935 - val_recall: 0.7885
Epoch 3/20
10/10 [==============================] - 41s 4s/step - loss: 0.4614 - accuracy: 0.8422 - precision: 0.8819 - recall: 0.8470 - val_loss: 0.3784 - val_accuracy: 0.8520 - val_precision: 0.9937 - val_recall: 0.8064
Epoch 4/20
10/10 [==============================] - 39s 4s/step - loss: 0.3769 - accuracy: 0.8874 - precision: 0.9216 - recall: 0.8750 - val_loss: 0.3279 - val_accuracy: 0.8529 - val_precision: 0.9937 - val_recall: 0.8077
Epoch 5/20
10/10 [==============================] - 41s 4s/step - loss: 0.4096 - accuracy: 0.8750 - precision: 0.8994 - recall: 0.8743 - val_loss: 0.2347 - val_accuracy: 0.8978 - val_precision: 0.9856 - val_recall: 0.8756
Epoch 6/20
10/10 [==============================] - 40s 4s/step - loss: 0.3161 - accuracy: 0.8953 - precision: 0.9273 - recall: 0.9015 - val_loss: 0.3458 - val_accuracy: 0.8472 - val_precision: 0.9936 - val_recall: 0.8000
Epoch 7/20
10/10 [==============================] - 41s 4s/step - loss: 0.3029 - accuracy: 0.9047 - precision: 0.9372 - recall: 0.9120 - val_loss: 0.2298 - val_accuracy: 0.8988 - val_precision: 0.9913 - val_recall: 0.8718
Epoch 8/20
10/10 [==============================] - 40s 4s/step - loss: 0.3026 - accuracy: 0.9094 - precision: 0.9382 - recall: 0.9089 - val_loss: 0.2042 - val_accuracy: 0.9140 - val_precision: 0.9915 - val_recall: 0.8923
Epoch 9/20
10/10 [==============================] - 41s 4s/step - loss: 0.3047 - accuracy: 0.9062 - precision: 0.9368 - recall: 0.9082 - val_loss: 0.2111 - val_accuracy: 0.9112 - val_precision: 0.9914 - val_recall: 0.8885
Epoch 10/20
10/10 [==============================] - 40s 4s/step - loss: 0.2720 - accuracy: 0.9193 - precision: 0.9408 - recall: 0.9124 - val_loss: 0.1740 - val_accuracy: 0.9312 - val_precision: 0.9810 - val_recall: 0.9256
Epoch 11/20
10/10 [==============================] - 41s 4s/step - loss: 0.3300 - accuracy: 0.8922 - precision: 0.9054 - recall: 0.9079 - val_loss: 0.1544 - val_accuracy: 0.9389 - val_precision: 0.9786 - val_recall: 0.9385
Epoch 12/20
10/10 [==============================] - 41s 4s/step - loss: 0.3210 - accuracy: 0.8953 - precision: 0.9115 - recall: 0.9091 - val_loss: 0.1616 - val_accuracy: 0.9370 - val_precision: 0.9798 - val_recall: 0.9346
Epoch 13/20
10/10 [==============================] - 40s 4s/step - loss: 0.2412 - accuracy: 0.9219 - precision: 0.9401 - recall: 0.9249 - val_loss: 0.2515 - val_accuracy: 0.8997 - val_precision: 0.9941 - val_recall: 0.8705
Epoch 14/20
10/10 [==============================] - 41s 4s/step - loss: 0.2706 - accuracy: 0.9187 - precision: 0.9390 - recall: 0.9243 - val_loss: 0.3977 - val_accuracy: 0.8329 - val_precision: 0.9984 - val_recall: 0.7769
Epoch 15/20
10/10 [==============================] - 40s 4s/step - loss: 0.2213 - accuracy: 0.9297 - precision: 0.9576 - recall: 0.9187 - val_loss: 0.2991 - val_accuracy: 0.8758 - val_precision: 0.9969 - val_recall: 0.8359
Epoch 16/20
10/10 [==============================] - 38s 4s/step - loss: 0.2182 - accuracy: 0.9412 - precision: 0.9520 - recall: 0.9493 - val_loss: 0.1645 - val_accuracy: 0.9312 - val_precision: 0.9863 - val_recall: 0.9205
Epoch 17/20
10/10 [==============================] - 40s 4s/step - loss: 0.2337 - accuracy: 0.9266 - precision: 0.9378 - recall: 0.9353 - val_loss: 0.1840 - val_accuracy: 0.9245 - val_precision: 0.9930 - val_recall: 0.9051
Epoch 18/20
10/10 [==============================] - 40s 4s/step - loss: 0.2162 - accuracy: 0.9359 - precision: 0.9474 - recall: 0.9396 - val_loss: 0.1357 - val_accuracy: 0.9475 - val_precision: 0.9827 - val_recall: 0.9462
Epoch 19/20
10/10 [==============================] - 39s 4s/step - loss: 0.3352 - accuracy: 0.8969 - precision: 0.9079 - recall: 0.9176 - val_loss: 0.1746 - val_accuracy: 0.9284 - val_precision: 0.9862 - val_recall: 0.9167
Epoch 20/20
10/10 [==============================] - 39s 4s/step - loss: 0.2600 - accuracy: 0.9031 - precision: 0.9203 - recall: 0.9103 - val_loss: 0.2899 - val_accuracy: 0.8768 - val_precision: 0.9954 - val_recall: 0.8385

nota:

poner el step_per_epoch alto puede estar sobrenetrennando el modelo, siempre es conveniente probar desde menos a mas

 Efficientnet¶

In [ ]:
model_history = modelo_eff.fit(train_generator_oversampling,
                            validation_data=validation_generator,
                            steps_per_epoch=10,
                            #validation_steps=validation_generator.n/64,
                            epochs=15,
                            verbose=1,
                            callbacks=[checkpoint_efficientnet, rlr])
In [ ]:
efficientenet_model_trained = efficientenet_model.fit(
    train_generator,
    epochs= 15, 
    steps_per_epoch= train_generator.n/32,
    validation_data= validation_generator,
    class_weight = class_weight,
    callbacks =[checkpoint_efficientnet, early_stopping_efficientnet,tensorboard_callback_efficientnet,rlr] 
    )
In [ ]:
efficientenet_model_trained2 = efficientenet_model.fit(
    train_generator_oversampling,
    epochs= 35, 
    steps_per_epoch= 10,
    validation_data= validation_generator,
    class_weight = class_weight_oversampled,
    callbacks =[checkpoint_efficientnet, early_stopping_efficientnet] 
    )
Epoch 1/35
2023-06-22 20:11:55.521769: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - ETA: 0s - loss: 0.8405 - accuracy: 0.5656 - precision: 0.5906 - recall: 0.8167
2023-06-22 20:13:06.351324: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
10/10 [==============================] - 175s 16s/step - loss: 0.8405 - accuracy: 0.5656 - precision: 0.5906 - recall: 0.8167 - val_loss: 0.5851 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 2/35
10/10 [==============================] - 47s 5s/step - loss: 0.8021 - accuracy: 0.6297 - precision: 0.6297 - recall: 1.0000 - val_loss: 0.6127 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 3/35
10/10 [==============================] - 37s 4s/step - loss: 0.8310 - accuracy: 0.5734 - precision: 0.5734 - recall: 1.0000 - val_loss: 0.6391 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 4/35
10/10 [==============================] - 46s 5s/step - loss: 0.8169 - accuracy: 0.6067 - precision: 0.6120 - recall: 0.9808 - val_loss: 0.5908 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 5/35
10/10 [==============================] - 36s 4s/step - loss: 0.8139 - accuracy: 0.6203 - precision: 0.6203 - recall: 1.0000 - val_loss: 0.6106 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 6/35
10/10 [==============================] - 37s 4s/step - loss: 0.8336 - accuracy: 0.5688 - precision: 0.5721 - recall: 0.9754 - val_loss: 0.6425 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 7/35
10/10 [==============================] - 35s 4s/step - loss: 0.8321 - accuracy: 0.5750 - precision: 0.5761 - recall: 0.9946 - val_loss: 0.6222 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000
Epoch 8/35
10/10 [==============================] - 34s 3s/step - loss: 0.8315 - accuracy: 0.5734 - precision: 0.5734 - recall: 1.0000 - val_loss: 0.6332 - val_accuracy: 0.7450 - val_precision: 0.7450 - val_recall: 1.0000

Comparación de modelos¶

Acurracy = (True Positive + True Negative) / (True Positive + True Negative + False Positive + False Negative)

Recall = (True Positive) / (True Positive + False Negative)

Precision = (True Positive) / (True Positive + True Negative)

Nuevos modelos creados¶

Modelo Propio 1¶

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(modelo_propio1_trained.history[element])
    axis[i].plot(modelo_propio1_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
evaluation_modelo_propio1 = modelo_propio1.evaluate(test_generator, steps= 624)
print(f"Test Accuracy: {evaluation_modelo_propio1[1] * 100:.2f}%")
print(f"Test Accuracy: {evaluation_modelo_propio1[2] * 100:.2f}%")
print(f"Test Accuracy: {evaluation_modelo_propio1[3] * 100:.2f}%")
624/624 [==============================] - 11s 17ms/step - loss: 0.3721 - accuracy: 0.8654 - precision: 0.9026 - recall: 0.8795
Test Accuracy: 86.54%
Test Accuracy: 90.26%
Test Accuracy: 87.95%
In [ ]:
prediction_modelo1 = modelo_propio1.predict(test_generator)
  5/624 [..............................] - ETA: 8s  
2023-06-19 18:17:17.686059: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 6s 9ms/step
In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_modelo1)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f}".format(crossover_cutoff, crossover_specificity))
Crossover al 0.16 con especificidad 0.86
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_modelo1)))
El área bajo la curva ROC es igual a 0.93
In [ ]:
prediction_modelo1_2 = np.where(prediction_modelo1 < 0.16, np.round(prediction_modelo1) , np.round(prediction_modelo1+0.35) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction_modelo1_2)

confunsion_matriz
Out[ ]:
array([[201,  33],
       [ 53, 337]])
In [ ]:
from tensorflow.keras import Model
last_conv_layer = modelo_propio1.get_layer('dense_4')
grad_model = Model([modelo_propio1.inputs], [last_conv_layer.output, modelo_propio1.output])
with tf.GradientTape() as tape:
    conv_outputs, predictions = grad_model(test_generator[0][0])
    loss = predictions[:, np.argmax(predictions[0])]
output = conv_outputs[0]
grads = tape.gradient(loss, conv_outputs)[0]

# Grad-CAM heatmap
weights = np.mean(grads, axis=(0))
cam = np.dot(output, weights)
In [ ]:
plt.imshow(test_generator[0][0][0])
Out[ ]:
<matplotlib.image.AxesImage at 0x7f87f75a2830>

Modelo propio 2¶

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(modelo_propio2_trained.history[element])
    axis[i].plot(modelo_propio2_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
evaluation_modelo_propio2 = modelo_propio2.evaluate(test_generator, steps= 624)
print(f"Test Accuracy: {evaluation_modelo_propio2[1] * 100:.2f}%")
624/624 [==============================] - 17s 25ms/step - loss: 0.4611 - accuracy: 0.8093 - precision: 0.7817 - recall: 0.9641
Test Accuracy: 80.93%
In [ ]:
prediction_propio2 = modelo_propio2.predict(test_generator)
2023-06-21 11:37:41.831840: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 8s 11ms/step
In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_propio2)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f}".format(crossover_cutoff, crossover_specificity))
Crossover al 0.58 con especificidad 0.79
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_propio2)))
El área bajo la curva ROC es igual a 0.88
In [ ]:
prediction_propio_cross = np.where(prediction_propio2 > 0.49, np.round(prediction_propio2+0.02) , np.round(prediction_propio2) )
prediction_propio_cross_medio = np.where(prediction_propio2 > 0.5, np.round(prediction_propio2) , np.round(prediction_propio2) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction_propio_cross_medio)
In [ ]:
confunsion_matriz
Out[ ]:
array([[172,  62],
       [ 56, 334]])
In [ ]:
plt.figure(figsize=(10, 7))

# Utilizar Seaborn para crear un heatmap de la matriz de confusión
sns.set(font_scale=1.2)
sns.heatmap(confunsion_matriz, annot=True, fmt='g', cmap='Blues', cbar=False, annot_kws={"size": 14})

# Agregar títulos y etiquetas
plt.title('Matriz de Confusión', fontsize=20)
plt.xlabel('Clase Predicha', fontsize=16)
plt.ylabel('Clase Verdadera', fontsize=16)

# Mostrar la gráfica
plt.show()
In [ ]:
from sklearn.metrics import classification_report
print(classification_report(test_generator.classes, prediction_propio_cross_medio))
              precision    recall  f1-score   support

           0       0.75      0.74      0.74       234
           1       0.84      0.86      0.85       390

    accuracy                           0.81       624
   macro avg       0.80      0.80      0.80       624
weighted avg       0.81      0.81      0.81       624

Modelo propio 3¶

In [ ]:
evaluacion_modelopropio3 = modelo_propio3.evaluate(test_generator)
624/624 [==============================] - 16s 24ms/step - loss: 0.4633 - accuracy: 0.7981 - precision: 0.7762 - recall: 0.9513
In [ ]:
prediction_modelopropio3 = modelo_propio3.predict(test_generator)
2023-06-21 19:55:13.449667: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 9s 13ms/step
In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_modelopropio3)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f}".format(crossover_cutoff, crossover_specificity))
Crossover al 0.84 con especificidad 0.83
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_modelopropio3)))
El área bajo la curva ROC es igual a 0.90
In [ ]:
prediction_propio3_cross = np.where(prediction_modelopropio3 > 0.84, np.round(prediction_modelopropio3) , np.round(prediction_modelopropio3 - 0.35) )
prediction_propio3_cross_medio = np.where(prediction_modelopropio3 > 0.5, np.round(prediction_modelopropio3) , np.round(prediction_modelopropio3) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction_propio3_cross_medio)
In [ ]:
confunsion_matriz
Out[ ]:
array([[127, 107],
       [ 19, 371]])
In [ ]:
plt.figure(figsize=(10, 7))

# Utilizar Seaborn para crear un heatmap de la matriz de confusión
sns.set(font_scale=1.2)
sns.heatmap(confunsion_matriz, annot=True, fmt='g', cmap='Blues', cbar=False, annot_kws={"size": 14})

# Agregar títulos y etiquetas
plt.title('Matriz de Confusión', fontsize=20)
plt.xlabel('Clase Predicha', fontsize=16)
plt.ylabel('Clase Verdadera', fontsize=16)

# Mostrar la gráfica
plt.show()
In [ ]:
from sklearn.metrics import classification_report
print(classification_report(test_generator.classes, prediction_propio3_cross))
              precision    recall  f1-score   support

           0       0.75      0.83      0.79       234
           1       0.89      0.83      0.86       390

    accuracy                           0.83       624
   macro avg       0.82      0.83      0.83       624
weighted avg       0.84      0.83      0.83       624

Modelos preentrenados¶

Grafica tensorboard

In [ ]:
%load_ext tensorboard
The tensorboard extension is already loaded. To reload it, use:
  %reload_ext tensorboard
In [ ]:
os.environ['TENSORBOARD_BINARY'] = '/Users/qiqizhou/Desktop/TFM/logs'
In [ ]:
!export TMPDIR=/tmp/USER; 

!mkdir -p $TMPDIR;
In [ ]:
!chmod 755
usage:	chmod [-fhv] [-R [-H | -L | -P]] [-a | +a | =a  [i][# [ n]]] mode|entry file ...
	chmod [-fhv] [-R [-H | -L | -P]] [-E | -C | -N | -i | -I] file ...
In [ ]:
%tensorboard --logdir logs
ERROR: Failed to start '/Users/qiqizhou/Desktop/TFM/logs' (set by the
`TENSORBOARD_BINARY` environment variable): [Errno 13] Permission
denied: '/Users/qiqizhou/Desktop/TFM/logs'

VGG16¶

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(vgg16_modelo_trained2.history[element])
    axis[i].plot(vgg16_modelo_trained2.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
vgg16_modelo.evaluate(test_generator, steps=624)
624/624 [==============================] - 25s 37ms/step - loss: 0.2355 - accuracy: 0.9038 - precision: 0.9412 - recall: 0.9026
Out[ ]:
[0.23549507558345795,
 0.9038461446762085,
 0.9411764740943909,
 0.9025641083717346]

Gráfica

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(vgg16_modelo_trained.history[element])
    axis[i].plot(vgg16_modelo_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])

Prediccion test

Sirve para separar los clases de test

In [ ]:
no_idx = list(range(1, 235))
yes_idx = list(range(235, 624))

Predicción de la modelo VGG

In [ ]:
prediction_vgg = vgg16_modelo.predict(test_generator)
2023-06-20 18:37:59.083358: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 15s 21ms/step
In [ ]:
plt.plot([prediction_vgg[i] for i in yes_idx], 'bo')
plt.show()
In [ ]:
plt.plot([prediction_vgg[i] for i in no_idx], 'ro')
plt.show()

Punto óptimo de corte

In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_vgg)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f}".format(crossover_cutoff, crossover_specificity))
Crossover al 0.49 con especificidad 0.90
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_vgg)))
El área bajo la curva ROC es igual a 0.97
In [ ]:
prediction2 = np.where(prediction_vgg > 0.49, np.round(prediction_vgg+0.02) , np.round(prediction_vgg) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction2)
In [ ]:
plt.figure(figsize=(10, 7))

# Utilizar Seaborn para crear un heatmap de la matriz de confusión
sns.set(font_scale=1.2)
sns.heatmap(confunsion_matriz, annot=True, fmt='g', cmap='Blues', cbar=False, annot_kws={"size": 14})

# Agregar títulos y etiquetas
plt.title('Matriz de Confusión', fontsize=20)
plt.xlabel('Clase Predicha', fontsize=16)
plt.ylabel('Clase Verdadera', fontsize=16)

# Mostrar la gráfica
plt.show()
In [ ]:
from sklearn.metrics import classification_report
print(classification_report(test_generator.classes, prediction2))
              precision    recall  f1-score   support

           0       0.85      0.90      0.88       234
           1       0.94      0.91      0.92       390

    accuracy                           0.91       624
   macro avg       0.90      0.90      0.90       624
weighted avg       0.91      0.91      0.91       624

In [ ]:
report = classification_report(test_generator.classes, prediction2, output_dict=True)

# Convierte el informe en un DataFrame de Pandas
df = pd.DataFrame(report).transpose()

# Muestra el DataFrame
print(df)
              precision    recall  f1-score     support
0              0.854251  0.901709  0.877339  234.000000
1              0.938992  0.907692  0.923077  390.000000
accuracy       0.905449  0.905449  0.905449    0.905449
macro avg      0.896622  0.904701  0.900208  624.000000
weighted avg   0.907214  0.905449  0.905925  624.000000

Para ver cuáles se ha equivocado y hacer un chequeo

In [ ]:
#vgg_model_importado = tf.keras.models.load_model('modelos/model_vgg.h5')

wrong_predicted_image = [[],[]]
correct_predicted_image = [[],[]]
i = 0 # el tamaño de imaganes que tenemos y el len de al lado (en nuestro caso tenemos solamente 624 imagenes en test)
# es hasta las malas predichas que queremos
while i< 624 and len(wrong_predicted_image[0]) < 200:
    j = 0
    while j < 1 and len(wrong_predicted_image[0]) <200:
        #print(i,0,j)
        image_array = (test_generator[i][0][j]).reshape(1,300,300,3)
        
        prediction = vgg16_modelo.predict(image_array)
        
        if int(round(prediction[0][0])) != test_generator[i][1][j]:
            wrong_predicted_image[0].append(image_array)
            wrong_predicted_image[1].append(int(round(prediction[0][0])))
            
        elif len(correct_predicted_image[0]) < 600:
            correct_predicted_image[0].append(image_array)
            correct_predicted_image[1].append(int(round(prediction[0][0])))
#         print(len(correct_predicted_image[0]),len(wrong_predicted_image[0]))  
        j += 1
        
    i += 1
2023-06-12 16:55:33.346026: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
1/1 [==============================] - 1s 640ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 37ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 37ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 38ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 37ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 38ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 35ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 34ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 29ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 36ms/step
1/1 [==============================] - 0s 32ms/step
1/1 [==============================] - 0s 32ms/step
In [ ]:
print(len(correct_predicted_image[1]))
len(wrong_predicted_image[1])
564
Out[ ]:
60

Mostrar todas las imagenes mal predichas con su etiqueta predicha

In [ ]:
for i, buenos in enumerate(wrong_predicted_image[0]):
    image = tf.keras.preprocessing.image.array_to_img(buenos.reshape(300,300,3))
    plt.imshow(image)
    plt.axis('off')  # Para ocultar los ejes
    
    # Agregar título con la etiqueta correspondiente
    etiqueta = wrong_predicted_image[1][i]
    plt.title(f'Etiqueta predicha: {etiqueta}', fontsize=10)
    
    plt.show()
In [ ]:
rcParams['figure.figsize'] = 22 ,4
fig, ax = plt.subplots(1,6)

i = 0

for ele in wrong_predicted_image[0]:
    image = tf.keras.preprocessing.image.array_to_img(ele.reshape(300,300,3))
    ax[i].imshow(image) 
    i += 1
    if i == 6:
        break;

print(f'wrong_prediction_by_model --- {wrong_predicted_image[1]}')
wrong_prediction_by_model --- [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

Para mostrar todas las que se quiera

In [ ]:
rcParams['figure.figsize'] = 80 ,40

print(f'wrong_prediction_by_model --- {wrong_predicted_image[1]}')
fig, ax = plt.subplots(5, 6) # filas y columnas

for row in range(5):
    for col in range(6):
        index = row * 6 + col  # Índice de la imagen en wrong_predicted_image
        if index < len(wrong_predicted_image[0]):
            ele = wrong_predicted_image[0][index]
            image = tf.keras.preprocessing.image.array_to_img(ele.reshape(300, 300, 3))
            ax[row][col].imshow(image)
            ax[row][col].set_title(f'{wrong_predicted_image[1][index]}', fontsize=20)
        else:
            # Si no hay suficientes imágenes, se muestra un espacio en blanco
            ax[row][col].axis('off')
plt.tight_layout()
plt.show()
wrong_prediction_by_model --- [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
In [ ]:
rcParams['figure.figsize'] = 22 ,4
fig, ax = plt.subplots(1,6)

i = 0
for ele in correct_predicted_image[0]:
    image = tf.keras.preprocessing.image.array_to_img(ele.reshape(300,300,3))
    ax[i].imshow(image)
    i += 1
    if i == 6:
        break;

print(f'correct_prediction_by_model --- {correct_predicted_image[1]}')
correct_prediction_by_model --- [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
In [ ]:
vgg16_modelo.evaluate(test_generator, steps= 624)
624/624 [==============================] - 22s 35ms/step - loss: 0.6382 - accuracy: 0.6747 - precision: 0.9895 - recall: 0.4846
Out[ ]:
[0.6381601095199585,
 0.6746794581413269,
 0.9895287752151489,
 0.48461538553237915]
In [ ]:
evaluation_vgg = vgg16_modelo.evaluate(test_generator, steps= 624)
624/624 [==============================] - 21s 33ms/step - loss: 0.2679 - accuracy: 0.9006 - precision: 0.8850 - recall: 0.9667
In [ ]:
print('loss rate at evaluation data :', evaluation_vgg[0])
print('accuracy rate at evaluation data :', evaluation_vgg[1])
print('precision rate at evaluation data :', evaluation_vgg[2])
print('recall rate at evaluation data :', evaluation_vgg[3])
loss rate at evaluation data : 0.2679091989994049
accuracy rate at evaluation data : 0.9006410241127014
precision rate at evaluation data : 0.8849765062332153
recall rate at evaluation data : 0.9666666388511658
In [ ]:
vgg16_modelo.save('modelos_entero/vgg_0,32_0,8589,0,84953_0,9461')
WARNING:absl:Found untraced functions such as _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op while saving (showing 5 of 13). These functions will not be directly callable after loading.
INFO:tensorflow:Assets written to: modelos_entero/vgg_0,32_0,8589,0,84953_0,9461/assets
INFO:tensorflow:Assets written to: modelos_entero/vgg_0,32_0,8589,0,84953_0,9461/assets

Inception¶

Gráfica

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(inceptionv3_modelo_imagenet_trained2.history[element])
    axis[i].plot(inceptionv3_modelo_imagenet_trained2.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(inceptionv3_modelo_imagenet_trained.history[element])
    axis[i].plot(inceptionv3_modelo_imagenet_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])

Estudio de interpretabilidad¶

In [ ]:
inception_model_importado = tf.keras.models.load_model('modelos_entero/inception_0,29_0,926,0,365_0,9461')
In [ ]:
import shap
shap.initjs()
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_clustering.py:35: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _pt_shuffle_rec(i, indexes, index_mask, partition_tree, M, pos):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_clustering.py:54: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def delta_minimization_order(all_masks, max_swap_size=100, num_passes=2):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_clustering.py:63: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _reverse_window(order, start, length):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_clustering.py:69: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _reverse_window_score_gain(masks, order, start, length):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_clustering.py:77: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _mask_delta_score(m1, m2):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/links.py:5: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def identity(x):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/links.py:10: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _identity_inverse(x):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/links.py:15: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def logit(x):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/links.py:20: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _logit_inverse(x):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_masked_model.py:363: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _build_fixed_single_output(averaged_outs, last_outs, outputs, batch_positions, varying_rows, num_varying_rows, link, linearizing_weights):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_masked_model.py:385: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _build_fixed_multi_output(averaged_outs, last_outs, outputs, batch_positions, varying_rows, num_varying_rows, link, linearizing_weights):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_masked_model.py:428: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _init_masks(cluster_matrix, M, indices_row_pos, indptr):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/utils/_masked_model.py:439: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _rec_fill_masks(cluster_matrix, indices_row_pos, indptr, indices, M, ind):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/maskers/_tabular.py:186: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _single_delta_mask(dind, masked_inputs, last_mask, data, x, noop_code):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/maskers/_tabular.py:197: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _delta_masking(masks, x, curr_delta_inds, varying_rows_out,
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html
  from .autonotebook import tqdm as notebook_tqdm
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/maskers/_image.py:175: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def _jit_build_partition_tree(xmin, xmax, ymin, ymax, zmin, zmax, total_ywidth, total_zwidth, M, clustering, q):
/opt/anaconda3/envs/Creadonuevo/lib/python3.10/site-packages/shap/explainers/_partition.py:676: NumbaDeprecationWarning: The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
  def lower_credit(i, value, M, values, clustering):
The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
The 'nopython' keyword argument was not supplied to the 'numba.jit' decorator. The implicit default value for this argument is currently False, but it will be changed to True in Numba 0.59.0. See https://numba.readthedocs.io/en/stable/reference/deprecation.html#deprecation-of-object-mode-fall-back-behaviour-when-using-jit for details.
In [ ]:
imagen = cv2.imread("/Users/qiqizhou/Desktop/TFM/chest_xray/test/NORMAL/NORMAL2-IM-0041-0001.jpeg")
imagen_test = cv2.imread("/Users/qiqizhou/Desktop/TFM/chest_xray/test/NORMAL/NORMAL2-IM-0060-0001.jpeg")
In [ ]:
batch = test_generator.next()
images_for_explanation = batch[0] 
In [ ]:
batch2 = test_generator2.next()
images_for_explanation_2 = batch2[0] 
In [ ]:
for i in range(300):
    batch2 = test_generator2.next()
In [ ]:
batch2[1]
Out[ ]:
array([1.], dtype=float32)
In [ ]:
for i in range(100):
    batch = test_generator.next()

images_for_explanation = batch[0] 
In [ ]:
imagen_t = batch[0]
eti_t = batch[1]
In [ ]:
eti_t
Out[ ]:
array([1.], dtype=float32)
In [ ]:
batch_train = train_generator_oversampling.next()
images_for_explanation_train = batch_train[0]
In [ ]:
num_reference_images = 100  # El número de imágenes de referencia que deseas utilizar
reference_images = []
for i in range((num_reference_images + train_generator_oversampling.batch_size - 1) // train_generator_oversampling.batch_size):
    images, _ = next(train_generator_oversampling)
    reference_images.extend(images)
    if len(reference_images) >= num_reference_images:
        break
reference_images = np.array(reference_images)[:num_reference_images]
In [ ]:
num_reference_images2 = 100  # El número de imágenes de referencia que deseas utilizar
reference_images2 = []
for i in range((num_reference_images2 + train_generator_oversampling2.batch_size - 1) // train_generator_oversampling2.batch_size):
    images, _ = next(train_generator_oversampling2)
    reference_images2.extend(images)
    if len(reference_images2) >= num_reference_images2:
        break
reference_images2 = np.array(reference_images2)[:num_reference_images2]

mobilnet

In [ ]:
explainer = shap.GradientExplainer(model_mobil, reference_images2)

# we explain the model's predictions on the first three samples of the test set
shap_values = explainer.shap_values(reference_images2)
2023-06-22 21:13:59.824226: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.

inception

In [ ]:
explainer = shap.GradientExplainer(inception_model_importado, reference_images)

# we explain the model's predictions on the first three samples of the test set
shap_values = explainer.shap_values(reference_images[:100])
`tf.keras.backend.set_learning_phase` is deprecated and will be removed after 2020-10-11. To update it, simply pass a True/False value to the `training` argument of the `__call__` method of your layer or model.
2023-06-22 20:53:56.335450: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
In [ ]:
batch2 = test_generator2.next()
images_for_explanation_2 = batch2[0] 
In [ ]:
shap_values = explainer.shap_values(images_for_explanation_2)
In [ ]:
shap.image_plot(shap_values, images_for_explanation_2)
In [ ]:
shap.image_plot(shap_values, images_for_explanation)

Prediccion test

In [ ]:
inceptionv3_modelo_imagenet.evaluate(test_generator)
624/624 [==============================] - 80s 110ms/step - loss: 0.2312 - accuracy: 0.9199 - precision: 0.9048 - recall: 0.9744
Out[ ]:
[0.23116599023342133,
 0.9198718070983887,
 0.9047619104385376,
 0.9743589758872986]
In [ ]:
prediction_inception_buena = inceptionv3_modelo_imagenet.predict(test_generator)
2023-06-22 19:30:38.567321: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 59s 73ms/step
In [ ]:
prediction_inception = inception_model_importado.predict(test_generator)
624/624 [==============================] - 43s 65ms/step
In [ ]:
plt.plot([prediction_inception[i] for i in yes_idx], 'bo')
plt.show()
In [ ]:
plt.plot([prediction_inception[i] for i in no_idx], 'ro')
plt.show()

Punto óptimo de corte

In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_inception_buena)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
crossover_sensitivity = tpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f} y sensibilidad {2:2f}".format(crossover_cutoff, crossover_specificity, crossover_sensitivity))
Crossover al 0.94 con especificidad 0.93 y sensibilidad 0.933333
In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_inception)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
crossover_sensitivity = tpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f} y sensibilidad {2:2f}".format(crossover_cutoff, crossover_specificity, crossover_sensitivity))
Crossover al 0.74 con especificidad 0.92 y sensibilidad 0.930769
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_inception)))

Ajuste de threshold y matriz de confunsion

In [ ]:
prediction_inception_crossover = np.where(prediction_inception > 0.74, np.round(prediction_inception) , np.round(prediction_inception-0.5) )

prediction_inception_crossover_medio = np.where(prediction_inception > 0.5, np.round(prediction_inception) , np.round(prediction_inception) )

prediction_inception_crossover_medio_buena = np.where(prediction_inception_buena > 0.5, np.round(prediction_inception_buena) , np.round(prediction_inception_buena) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction_inception_crossover_medio)
In [ ]:
confunsion_matriz
Out[ ]:
array([[209,  25],
       [ 21, 369]])
In [ ]:
plt.figure(figsize=(10, 7))

# Utilizar Seaborn para crear un heatmap de la matriz de confusión
sns.set(font_scale=1.2)
sns.heatmap(confunsion_matriz, annot=True, fmt='g', cmap='Blues', cbar=False, annot_kws={"size": 14})

# Agregar títulos y etiquetas
plt.title('Matriz de Confusión', fontsize=20)
plt.xlabel('Clase Predicha', fontsize=16)
plt.ylabel('Clase Verdadera', fontsize=16)

# Mostrar la gráfica
plt.show()
In [ ]:
print(classification_report(test_generator.classes, prediction_inception_crossover_medio))
              precision    recall  f1-score   support

           0       0.91      0.89      0.90       234
           1       0.94      0.95      0.94       390

    accuracy                           0.93       624
   macro avg       0.92      0.92      0.92       624
weighted avg       0.93      0.93      0.93       624

In [ ]:
accuracy = np.trace(confunsion_matriz) / np.sum(confunsion_matriz)

# Calculate recall (sensitivity)
recall = confunsion_matriz[1, 1] / (confunsion_matriz[1, 0] + confunsion_matriz[1, 1])

# Calculate precision
precision = confunsion_matriz[1, 1] / (confunsion_matriz[0, 1] + confunsion_matriz[1, 1])

# Calculate F1-Score
f1_score = 2 * (precision * recall) / (precision + recall)

print("Accuracy: ", accuracy)
print("Recall: ", recall)
print("Precision: ", precision)
print("F1-Score: ", f1_score)
Accuracy:  0.9262820512820513
Recall:  0.9307692307692308
Precision:  0.9502617801047121
F1-Score:  0.9404145077720208
In [ ]:
#inception_model_importado = tf.keras.models.load_model('modelos_entero/inception_0,29_0,926,0,365_0,9461')

wrong_predicted_image = [[],[]]
correct_predicted_image = [[],[]]
i = 0 # el tamaño de imaganes que tenemos y el len de al lado (en nuestro caso tenemos solamente 624 imagenes en test)
# es hasta las malas que queremos
while i< 624 and len(wrong_predicted_image[0]) < 200:
    j = 0
    while j < 1 and len(wrong_predicted_image[0]) < 200:
        #print(i,0,j)
        image_array = (test_generator[i][0][j]).reshape(1,300,300,3)
        
        prediction = inception_model_importado.predict(image_array)
        
        if int(round(prediction[0][0])) != test_generator[i][1][j]:
            wrong_predicted_image[0].append(image_array)
            wrong_predicted_image[1].append(int(round(prediction[0][0])))
            
        elif len(correct_predicted_image[0]) < 600: # en caso de que queramos que guarda hasta un numero de correctas 
            correct_predicted_image[0].append(image_array)
            correct_predicted_image[1].append(int(round(prediction[0][0])))
#         print(len(correct_predicted_image[0]),len(wrong_predicted_image[0]))  
        j += 1
        
    i += 1
2023-06-12 18:56:24.182862: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
1/1 [==============================] - 3s 3s/step
1/1 [==============================] - 0s 111ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 95ms/step
1/1 [==============================] - 0s 89ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 90ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 103ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 89ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 89ms/step
1/1 [==============================] - 0s 90ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 87ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 90ms/step
1/1 [==============================] - 0s 89ms/step
1/1 [==============================] - 0s 91ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 90ms/step
1/1 [==============================] - 0s 88ms/step
1/1 [==============================] - 0s 88ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 88ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 86ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 91ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 89ms/step
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 83ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 82ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 80ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 81ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 76ms/step
1/1 [==============================] - 0s 78ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 84ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 73ms/step
1/1 [==============================] - 0s 73ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 74ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 76ms/step
In [ ]:
print(len(correct_predicted_image[1]))
len(wrong_predicted_image[1])
578
Out[ ]:
46
In [ ]:
rcParams['figure.figsize'] = 22 ,4
fig, ax = plt.subplots(1,6)

i = 0
for ele in correct_predicted_image[0]:
    image = tf.keras.preprocessing.image.array_to_img(ele.reshape(300,300,3))
    ax[i].imshow(image)
    i += 1
    if i ==6 :
        break

print(f'wrong_prediction_by_model --- {wrong_predicted_image[1]}')
wrong_prediction_by_model --- [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
In [ ]:
evaluation_inception = inceptionv3_modelo_imagenet.evaluate(test_generator, steps=624)
2023-07-11 22:18:57.210916: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 48s 70ms/step - loss: 0.2978 - accuracy: 0.9263 - precision: 0.9365 - recall: 0.9462
In [ ]:
print('loss rate at evaluation data :', evaluation_inception[0])
print('accuracy rate at evaluation data :', evaluation_inception[1])
print('precision rate at evaluation data :', evaluation_inception[2])
print('recall rate at evaluation data :', evaluation_inception[3])
loss rate at evaluation data : 0.2978222072124481
accuracy rate at evaluation data : 0.9262820482254028
precision rate at evaluation data : 0.9365482330322266
recall rate at evaluation data : 0.9461538195610046

Si es bueno el modelo entonces lo guardamos por completo para no volver a ejecutar todo de nuevo

In [ ]:
inceptionv3_modelo_imagenet.save('modelos_entero/inception_0,29_0,926,0,365_0,9461')
WARNING:absl:Found untraced functions such as _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op while saving (showing 5 of 94). These functions will not be directly callable after loading.
INFO:tensorflow:Assets written to: modelos_entero/inception_0,29_0,926,0,365_0,9461/assets
INFO:tensorflow:Assets written to: modelos_entero/inception_0,29_0,926,0,365_0,9461/assets

ResNet50¶

Gráfica

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(resnet50_modelo_trained2.history[element])
    axis[i].plot(resnet50_modelo_trained2.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
resnet50_modelo.evaluate(test_generator)
624/624 [==============================] - 43s 62ms/step - loss: 0.2539 - accuracy: 0.9151 - precision: 0.9399 - recall: 0.9231
Out[ ]:
[0.25394192337989807,
 0.9150640964508057,
 0.939947783946991,
 0.9230769276618958]
In [ ]:
resnet50_modelo.save('modelo_vista/resnet50v2_utlimo.h5')
In [ ]:
prediction_resnet = resnet50_modelo.predict(test_generator)
2023-06-20 17:42:09.126990: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 35s 47ms/step
In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_resnet)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f}".format(crossover_cutoff, crossover_specificity))
Crossover al 0.57 con especificidad 0.91
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_resnet)))
El área bajo la curva ROC es igual a 0.97
In [ ]:
prediction_resnet_crossover = np.where(prediction_resnet > 0.57, np.round(prediction_resnet) , np.round(prediction_resnet-0.5) )
prediction_resnet_crossover_medio = np.where(prediction_resnet > 0.49, np.round(prediction_resnet) , np.round(prediction_resnet) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction_resnet_crossover_medio)
In [ ]:
confunsion_matriz
Out[ ]:
array([[211,  23],
       [ 30, 360]])
In [ ]:
plt.figure(figsize=(10, 7))

# Utilizar Seaborn para crear un heatmap de la matriz de confusión
sns.set(font_scale=1.2)
sns.heatmap(confunsion_matriz, annot=True, fmt='g', cmap='Blues', cbar=False, annot_kws={"size": 14})

# Agregar títulos y etiquetas
plt.title('Matriz de Confusión', fontsize=20)
plt.xlabel('Clase Predicha', fontsize=16)
plt.ylabel('Clase Verdadera', fontsize=16)

# Mostrar la gráfica
plt.show()
In [ ]:
from sklearn.metrics import classification_report
print(classification_report(test_generator.classes, prediction_resnet_crossover_medio))
              precision    recall  f1-score   support

           0       0.88      0.90      0.89       234
           1       0.94      0.92      0.93       390

    accuracy                           0.92       624
   macro avg       0.91      0.91      0.91       624
weighted avg       0.92      0.92      0.92       624

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(resnet50_modelo_trained.history[element])
    axis[i].plot(resnet50_modelo_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])

 Prediction¶

In [ ]:
resnet_modelo_importado = tf.keras.models.load_model('modelos/model_resnet.h5')

wrong_predicted_image = [[],[]]
correct_predicted_image = [[],[]]
i = 0 # el tamaño de imaganes que tenemos y el len de al lado (en nuestro caso tenemos solamente 624 imagenes en test)
# es hasta las malas que queremos
while i< 624 and len(wrong_predicted_image[0]) < 200:
    j = 0
    while j < 1 and len(wrong_predicted_image[0]) < 200:
        print(i,0,j)
        image_array = (test_generator[i][0][j]).reshape(1,300,300,3)
        
        prediction = resnet_modelo_importado.predict(image_array)
        
        if int(round(prediction[0][0])) != test_generator[i][1][j]:
            wrong_predicted_image[0].append(image_array)
            wrong_predicted_image[1].append(int(round(prediction[0][0])))
            
        elif len(correct_predicted_image[0]) < 600: # en caso de que queramos que guarda hasta un numero de correctas 
            correct_predicted_image[0].append(image_array)
            correct_predicted_image[1].append(int(round(prediction[0][0])))
#         print(len(correct_predicted_image[0]),len(wrong_predicted_image[0]))  
        j += 1
        
    i += 1
In [ ]:
print(len(correct_predicted_image[1]))
print(len(wrong_predicted_image[1]))
In [ ]:
rcParams['figure.figsize'] = 22 ,4
fig, ax = plt.subplots(1,6)

i = 0
for ele in correct_predicted_image[0]:
    image = tf.keras.preprocessing.image.array_to_img(ele.reshape(300,300,3))
    ax[i].imshow(image)
    i += 1

print(f'wrong_prediction_by_model --- {wrong_predicted_image[1]}')
In [ ]:
evaluation_resnet50 = resnet50_modelo.evaluate(test_generator,steps= 624)
624/624 [==============================] - 29s 46ms/step - loss: 0.3489 - accuracy: 0.8718 - precision: 0.8475 - recall: 0.9692
In [ ]:
print('loss rate at evaluation data :', evaluation_resnet50[0])
print('accuracy rate at evaluation data :', evaluation_resnet50[1])
print('precision rate at evaluation data :', evaluation_resnet50[2])
print('recall rate at evaluation data :', evaluation_resnet50[3])
loss rate at evaluation data : 0.34886234998703003
accuracy rate at evaluation data : 0.8717948794364929
precision rate at evaluation data : 0.847533643245697
recall rate at evaluation data : 0.9692307710647583
In [ ]:
resnet50_modelo.save('modelos_entero/resnet_0,348_0,87179,0,847533_0,969')
WARNING:absl:Found untraced functions such as _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op while saving (showing 5 of 53). These functions will not be directly callable after loading.
INFO:tensorflow:Assets written to: modelos_entero/resnet_0,348_0,87179,0,847533_0,969/assets
INFO:tensorflow:Assets written to: modelos_entero/resnet_0,348_0,87179,0,847533_0,969/assets

 Efficientnet¶

Modelo malo descartado

Gráficas

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(efficientenet_model_trained.history[element])
    axis[i].plot(efficientenet_model_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['accuracy', 'loss','precision','recall']):
    axis[i].plot(efficientenet_model_trained2.history[element])
    axis[i].plot(efficientenet_model_trained2.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
efficientenet_model.evaluate(test_generator)
624/624 [==============================] - 117s 156ms/step - loss: 0.6642 - accuracy: 0.6250 - precision: 0.6250 - recall: 1.0000
Out[ ]:
[0.6641636490821838, 0.625, 0.625, 1.0]

Predicción test

In [ ]:
efficientnet_modelo_importado = tf.keras.models.load_model('modelos/model_efficientnet.h5')

wrong_predicted_image = [[],[]]
correct_predicted_image = [[],[]]
i = 0 # el tamaño de imaganes que tenemos y el len de al lado (en nuestro caso tenemos solamente 624 imagenes en test)
# es hasta las malas que queremos
while i< 624 and len(wrong_predicted_image[0]) < 200:
    j = 0
    while j < 1 and len(wrong_predicted_image[0]) < 200:
        print(i,0,j)
        image_array = (test_generator[i][0][j]).reshape(1,300,300,3)
        
        prediction = efficientnet_modelo_importado.predict(image_array)
        
        if int(round(prediction[0][0])) != test_generator[i][1][j]:
            wrong_predicted_image[0].append(image_array)
            wrong_predicted_image[1].append(int(round(prediction[0][0])))
            
        elif len(correct_predicted_image[0]) < 600: # en caso de que queramos que guarda hasta un numero de correctas 
            correct_predicted_image[0].append(image_array)
            correct_predicted_image[1].append(int(round(prediction[0][0])))
#         print(len(correct_predicted_image[0]),len(wrong_predicted_image[0]))  
        j += 1
        
    i += 1
In [ ]:
evaluation_efficient = efficientenet_model.evaluate(test_generator,steps= 624)
624/624 [==============================] - 55s 87ms/step - loss: 1.6394 - accuracy: 0.6667 - precision: 0.6522 - recall: 1.0000
In [ ]:
print('loss rate at evaluation data :', evaluation_efficient[0])
print('accuracy rate at evaluation data :', evaluation_efficient[1])
print('precision rate at evaluation data :', evaluation_efficient[2])
print('recall rate at evaluation data :', evaluation_efficient[3])
loss rate at evaluation data : 1.6393872499465942
accuracy rate at evaluation data : 0.6666666865348816
precision rate at evaluation data : 0.6521739363670349
recall rate at evaluation data : 1.0

MobileNet¶

In [ ]:
figure, axis = plt.subplots(2, 2, figsize=(20,10))
axis = axis.ravel()

for i,element in enumerate(['loss', 'binary_accuracy','mae']):
    axis[i].plot(model_mobil_trained.history[element])
    axis[i].plot(model_mobil_trained.history['val_' + element])
    axis[i].set_title('Model {}'.format(element))
    axis[i].set_xlabel('epochs')
    axis[i].set_ylabel(element)
    axis[i].legend(['train', 'val'])
In [ ]:
model_mobil.evaluate(test_generator)
2023-06-22 22:42:28.972587: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 36s 41ms/step - loss: 0.2329 - binary_accuracy: 0.9407 - mae: 0.0714
Out[ ]:
[0.23293422162532806, 0.9407051205635071, 0.07139717042446136]
In [ ]:
model_mobil.save('modelos_entero/mobilenet_0,232_0,940789')
WARNING:absl:Found untraced functions such as _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op while saving (showing 5 of 27). These functions will not be directly callable after loading.
INFO:tensorflow:Assets written to: modelos_entero/mobilenet_0,232_0,940789/assets
INFO:tensorflow:Assets written to: modelos_entero/mobilenet_0,232_0,940789/assets
In [ ]:
mobilenetmodelo = tf.keras.models.load_model('modelos_entero/mobilenet_0,232_0,940789')
Metal device set to: Apple M1 Pro

systemMemory: 16.00 GB
maxCacheSize: 5.33 GB

2023-07-07 13:38:00.609021: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  SSE4.1 SSE4.2
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2023-07-07 13:38:00.613303: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:306] Could not identify NUMA node of platform GPU ID 0, defaulting to 0. Your kernel may not have been built with NUMA support.
2023-07-07 13:38:00.614010: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  SSE4.1 SSE4.2
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2023-07-07 13:38:00.614398: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:272] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:0 with 0 MB memory) -> physical PluggableDevice (device: 0, name: METAL, pci bus id: <undefined>)
In [ ]:
prediction_mobil = mobilenetmodelo.predict(test_generator2)
2023-07-07 13:38:30.246148: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
624/624 [==============================] - 13s 19ms/step
In [ ]:
y_true = (test_generator.classes ).astype("int")
fpr, tpr, thresholds = roc_curve(y_true, prediction_mobil)
plt.plot(thresholds, 1.-fpr)
plt.plot(thresholds, tpr)
plt.show()
crossover_index = np.min(np.where(1.-fpr <= tpr))
crossover_cutoff = thresholds[crossover_index]
crossover_specificity = 1.-fpr[crossover_index]
crossover_sensitivity = tpr[crossover_index]
print("Crossover al {0:.2f} con especificidad {1:.2f} y sensibilidad {2:2f}".format(crossover_cutoff, crossover_specificity, crossover_sensitivity))
Crossover al 0.66 con especificidad 0.94 y sensibilidad 0.946154
In [ ]:
plt.plot(fpr, tpr)
plt.show()
print("El área bajo la curva ROC es igual a {0:.2f}".format(roc_auc_score(y_true, prediction_mobil)))
El área bajo la curva ROC es igual a 0.98
In [ ]:
prediction_mobil_crossover = np.where(prediction_mobil > 0.95, np.round(prediction_mobil) , np.round(prediction_mobil-0.5) )

prediction_mobil_crossover_medio = np.where(prediction_mobil > 0.5, np.round(prediction_mobil) , np.round(prediction_mobil) )

confunsion_matriz =  confusion_matrix(test_generator.classes, prediction_mobil_crossover_medio)
In [ ]:
confunsion_matriz
Out[ ]:
array([[218,  16],
       [ 15, 375]])
In [ ]:
plt.figure(figsize=(10, 7))

# Utilizar Seaborn para crear un heatmap de la matriz de confusión
sns.set(font_scale=1.2)
sns.heatmap(confunsion_matriz, annot=True, fmt='g', cmap='Blues', cbar=False, annot_kws={"size": 14})

# Agregar títulos y etiquetas
plt.title('Matriz de Confusión', fontsize=20)
plt.xlabel('Clase Predicha', fontsize=16)
plt.ylabel('Clase Verdadera', fontsize=16)

# Mostrar la gráfica
plt.show()
In [ ]:
print(classification_report(test_generator.classes, prediction_mobil_crossover_medio))
              precision    recall  f1-score   support

           0       0.94      0.93      0.93       234
           1       0.96      0.96      0.96       390

    accuracy                           0.95       624
   macro avg       0.95      0.95      0.95       624
weighted avg       0.95      0.95      0.95       624

In [ ]:
#vgg_model_importado = tf.keras.models.load_model('modelos/model_vgg.h5')

wrong_predicted_image = [[],[]]
correct_predicted_image = [[],[]]
i = 0 # el tamaño de imaganes que tenemos y el len de al lado (en nuestro caso tenemos solamente 624 imagenes en test)
# es hasta las malas predichas que queremos
while i< 624 and len(wrong_predicted_image[0]) < 200:
    j = 0
    while j < 1 and len(wrong_predicted_image[0]) <200:
        #print(i,0,j)
        image_array = (test_generator2[i][0][j]).reshape(1,224,224,3)
        
        prediction3 = model_mobil.predict(image_array)
        
        if int(round(prediction3[0][0])) != test_generator2[i][1][j]:
            wrong_predicted_image[0].append(image_array)
            wrong_predicted_image[1].append(int(round(prediction3[0][0])))
            
        elif len(correct_predicted_image[0]) < 600:
            correct_predicted_image[0].append(image_array)
            correct_predicted_image[1].append(int(round(prediction3[0][0])))
#         print(len(correct_predicted_image[0]),len(wrong_predicted_image[0]))  
        j += 1
        
    i += 1
2023-06-22 23:38:52.180291: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:114] Plugin optimizer for device_type GPU is enabled.
1/1 [==============================] - 7s 7s/step
1/1 [==============================] - 2s 2s/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 60ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 53ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 52ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 50ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 50ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 54ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 1s 1s/step
1/1 [==============================] - 0s 70ms/step
1/1 [==============================] - 0s 56ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 50ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 52ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 55ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 53ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 50ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 67ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 60ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 66ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 50ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 56ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 75ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 41ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 52ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 46ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 45ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 79ms/step
1/1 [==============================] - 0s 40ms/step
1/1 [==============================] - 0s 44ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 54ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 77ms/step
1/1 [==============================] - 0s 69ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 42ms/step
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - 0s 44ms/step

Estudio de interpretabilidad para todos las primeras x imagenes correctamente predichas

In [ ]:
rcParams['figure.figsize'] = 80 ,40

print(f'correct_predicted_image --- {correct_predicted_image[1]}')

for row in range(2):
    for col in range(2):
        index = row * 225 + col  # Índice de la imagen en wrong_predicted_image
        if index < len(correct_predicted_image[0]):
            ele = correct_predicted_image[0][index+10]
            image = tf.keras.preprocessing.image.array_to_img(ele.reshape(224, 224, 3))
            shap_values = explainer.shap_values(correct_predicted_image[0][index+10])
            shap.image_plot(shap_values, correct_predicted_image[0][index+10])
        else:
            # Si no hay suficientes imágenes, se muestra un espacio en blanco
            ax[row][col].axis('off')
plt.tight_layout()
plt.show()
correct_predicted_image --- [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
<Figure size 8000x4000 with 0 Axes>
In [ ]:
# Modelo 2

modelo_propio3 = Sequential()

modelo_propio3.add(Conv2D(filters=16, kernel_size=(5, 5), input_shape=(300, 300, 3), activation='relu'))
modelo_propio3.add(MaxPooling2D(pool_size=(3, 3)))

modelo_propio3.add(Conv2D(filters=32, kernel_size=(5, 5), activation='relu'))
#modelo_propio3.add(MaxPooling2D(pool_size=(3, 3)))

modelo_propio3.add(Flatten())
modelo_propio3.add(Dropout(0.2))
modelo_propio3.add(Dense(256, activation='relu'))


modelo_propio3.add(Dense(1, activation='sigmoid'))

modelo_propio3.compile(loss=tf.losses.BinaryCrossentropy(), 
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.001), 
              metrics=['accuracy', tf.keras.metrics.Precision(name='precision'),
        tf.keras.metrics.Recall(name='recall')])
In [ ]:
print(prediction_mobil[10])
print(prediction_mobil[11])
print(prediction_mobil[235])
print(prediction_mobil[236])
[2.239651e-08]
[0.00027984]
[0.99999917]
[0.9999969]
In [ ]:
rcParams['figure.figsize'] = 80 ,40

print(f'correct_predicted_image --- {correct_predicted_image[1]}')
fig, ax = plt.subplots(2, 2) # filas y columnas

for row in range(2):
    for col in range(2):
        index = row * 225 + col  # Índice de la imagen en wrong_predicted_image
        if index < len(correct_predicted_image[0]):
            print(index+10)
            ele = correct_predicted_image[0][index+10]
            image = tf.keras.preprocessing.image.array_to_img(ele.reshape(224, 224, 3))
            ax[row][col].imshow(image)
            ax[row][col].set_title('Predicción: 'f'{correct_predicted_image[1][index+10]}', fontsize=100)
        else:
            # Si no hay suficientes imágenes, se muestra un espacio en blanco
            ax[row][col].axis('off')
plt.tight_layout()
plt.show()
correct_predicted_image --- [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
10
11
235
236
In [ ]:
rcParams['figure.figsize'] = 80 ,40

print(f'wrong_prediction_by_model --- {wrong_predicted_image[1]}')
fig, ax = plt.subplots(2, 2) # filas y columnas

for row in range(2):
    for col in range(2):
        index = row * 20 + col  # Índice de la imagen en wrong_predicted_image
        if index < len(wrong_predicted_image[0]):
            ele = wrong_predicted_image[0][index]
            image = tf.keras.preprocessing.image.array_to_img(ele.reshape(224, 224, 3))
            ax[row][col].imshow(image)
            ax[row][col].set_title('Predicción: 'f'{wrong_predicted_image[1][index]}', fontsize=100)
        else:
            # Si no hay suficientes imágenes, se muestra un espacio en blanco
            ax[row][col].axis('off')
plt.tight_layout()
plt.show()
wrong_prediction_by_model --- [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
In [ ]:
rcParams['figure.figsize'] = 80 ,40

print(f'wrong_prediction_by_model --- {wrong_predicted_image[1]}')
fig, ax = plt.subplots(2, 2) # filas y columnas

for row in range(2):
    for col in range(2):
        index = row * 20 + col  # Índice de la imagen en wrong_predicted_image
        if index < len(wrong_predicted_image[0]):
            ele = wrong_predicted_image[0][index]
            image = tf.keras.preprocessing.image.array_to_img(ele.reshape(224, 224, 3))
            shap_values = explainer.shap_values(wrong_predicted_image[0][index])
            shap.image_plot(shap_values, wrong_predicted_image[0][index])
            #ax[row][col].imshow(image)
            #ax[row][col].set_title('Etiqueta: 'f'{wrong_predicted_image[1][index]}', fontsize=100)
        else:
            # Si no hay suficientes imágenes, se muestra un espacio en blanco
            ax[row][col].axis('off')
plt.tight_layout()
plt.show()
wrong_prediction_by_model --- [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
<Figure size 8000x4000 with 0 Axes>
In [ ]:
len(wrong_predicted_image[1])
Out[ ]:
31
In [ ]:
for i, buenos in enumerate(wrong_predicted_image[0]):
    image = tf.keras.preprocessing.image.array_to_img(buenos.reshape(224,224,3))
    plt.imshow(image)
    plt.axis('off')  # Para ocultar los ejes
    
    # Agregar título con la etiqueta correspondiente
    etiqueta = wrong_predicted_image[1][i]
    plt.title(f'Etiqueta predicha: {etiqueta}', fontsize=10)
    
    plt.show()